├── tests
├── __init__.py
├── test_checks_price_feed.py
└── test_checks_publisher.py
├── .python-version
├── entrypoint.sh
├── .gitignore
├── pyth_observer
├── models.py
├── check
│ ├── __init__.py
│ ├── price_feed.py
│ ├── stall_detection.py
│ └── publisher.py
├── alert_utils.py
├── health_server.py
├── coingecko.py
├── zenduty.py
├── cli.py
├── event.py
├── metrics.py
├── __init__.py
└── dispatch.py
├── sample.publishers.yaml
├── LICENSE
├── .github
├── actions
│ └── python-poetry
│ │ └── action.yml
└── workflows
│ └── build-and-push-image.yaml
├── .pre-commit-config.yaml
├── Makefile
├── pyproject.toml
├── Dockerfile
├── AGENTS.md
├── sample.config.yaml
├── README.md
├── sample.coingecko.yaml
└── scripts
└── build_coingecko_mapping.py
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.11
2 |
--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | eval "exec $@"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.py[cod]
3 | *$py.class
4 | poetry.toml
5 |
6 | .DS_Store
7 | .envrc
8 | .coverage
9 |
10 | .env
11 | .vscode/
12 | .idea/
13 |
--------------------------------------------------------------------------------
/pyth_observer/models.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | from typing import Optional
3 |
4 |
@dataclasses.dataclass
class ContactInfo:
    """Optional alert-routing destinations for a publisher.

    All fields default to None; keys mirror the ``contact_info`` mapping in
    the publishers YAML file (see sample.publishers.yaml).
    """

    # Telegram group chat id. NOTE(review): the sample YAML supplies an
    # integer here and values are not coerced — confirm str vs int.
    telegram_chat_id: Optional[str] = None
    # Contact email address
    email: Optional[str] = None
    # Slack channel id
    slack_channel_id: Optional[str] = None
10 |
11 |
@dataclasses.dataclass
class Publisher:
    """A Pyth publisher identity loaded from the publishers YAML file."""

    # Publisher public key string (used as the lookup key by the CLI loader)
    key: str
    # Human-readable publisher name
    name: str
    # Alert-routing info; None when the YAML entry has no contact_info
    contact_info: Optional[ContactInfo] = None
17 |
--------------------------------------------------------------------------------
/pyth_observer/check/__init__.py:
--------------------------------------------------------------------------------
from pyth_observer.check.price_feed import (
    PriceFeedCheck,
    PriceFeedCheckConfig,
    PriceFeedState,
)
from pyth_observer.check.publisher import (
    PublisherCheck,
    PublisherCheckConfig,
    PublisherState,
)

# Union aliases covering both check families (price-feed and publisher).
# Code elsewhere accepts any check/state/config via these names.
Check = PriceFeedCheck | PublisherCheck
State = PriceFeedState | PublisherState
Config = PriceFeedCheckConfig | PublisherCheckConfig
15 |
--------------------------------------------------------------------------------
/sample.publishers.yaml:
--------------------------------------------------------------------------------
1 | - name: publisher1
2 | key: "FR19oB2ePko2haah8yP4fhTycxitxkVQTxk3tssxX1Ce"
3 | contact_info:
4 | # Optional fields for contact information
5 | telegram_chat_id: -4224704640
6 | email:
7 | slack_channel_id:
8 |
9 | - name: publisher2
10 | key: "DgAK7fPveidN72LCwCF4QjFcYHchBZbtZnjEAtgU1bMX"
11 | contact_info:
12 | # Optional fields for contact information
13 | telegram_chat_id: -4224704640
14 | email:
15 | slack_channel_id:
16 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2023 Pyth Contributors.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
--------------------------------------------------------------------------------
/pyth_observer/alert_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility functions for alert identification and management.
3 | """
4 |
5 | from pyth_observer.check import Check
6 | from pyth_observer.check.publisher import PublisherState
7 |
8 |
def generate_alert_identifier(check: Check) -> str:
    """
    Generate a unique alert identifier for a check.

    The identifier is ``<CheckClassName>-<symbol>``, with ``-<publisher_name>``
    appended for publisher checks so alerts are unique per publisher.
    This is a shared function to ensure consistency across the codebase.
    """
    # Call check.state() exactly once so the identifier is derived from a
    # single snapshot (the original called it twice, which also did the
    # work twice and could in principle observe two different states).
    state = check.state()
    alert_identifier = f"{check.__class__.__name__}-{state.symbol}"
    if isinstance(state, PublisherState):
        alert_identifier += f"-{state.publisher_name}"
    return alert_identifier
19 |
--------------------------------------------------------------------------------
/.github/actions/python-poetry/action.yml:
--------------------------------------------------------------------------------
1 | name: Python Poetry
2 | description: Sets up a Python environment with Poetry
3 |
4 | inputs:
5 | python-version:
6 | required: false
7 | description: Python version
    default: "3.11"
9 | poetry-version:
10 | required: false
11 | description: Poetry version
12 | default: "2.1.4"
13 |
14 | runs:
15 | using: composite
16 | steps:
17 | - uses: actions/setup-python@v2
18 | with:
19 | python-version: ${{ inputs.python-version }}
20 | - uses: abatilo/actions-poetry@v2.0.0
21 | with:
22 | poetry-version: ${{ inputs.poetry-version }}
23 | - run: poetry install
24 | shell: sh
25 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: local
3 | hooks:
      - id: isort
        name: isort
        entry: poetry run isort --profile=black .
        pass_filenames: false
        language: system
8 | - id: black
9 | name: black
10 | entry: poetry run black .
11 | pass_filenames: false
12 | language: system
13 | - id: pyright
14 | name: pyright
15 | entry: poetry run pyright pyth_observer/ tests/
16 | pass_filenames: false
17 | language: system
18 | - id: pyflakes
19 | name: pyflakes
20 | entry: poetry run pyflakes pyth_observer/ tests/
21 | pass_filenames: false
22 | language: system
23 |
--------------------------------------------------------------------------------
/pyth_observer/health_server.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import TYPE_CHECKING
3 |
4 | from aiohttp import web
5 |
6 | if TYPE_CHECKING:
7 | from aiohttp.web_request import Request
8 |
9 | observer_ready = False
10 |
11 |
async def live_handler(request: "Request") -> web.Response:
    """Liveness probe: always returns 200 OK while the process is running."""
    return web.Response(text="OK")
14 |
15 |
async def ready_handler(request: "Request") -> web.Response:
    """Readiness probe: 200 OK once the module-level observer_ready flag
    is set, 503 before then."""
    if not observer_ready:
        return web.Response(status=503, text="Not Ready")
    return web.Response(text="OK")
21 |
22 |
async def start_health_server(port: int = 8080) -> None:
    """Serve the /live and /ready health probes on the given port, forever.

    This coroutine never returns: once the aiohttp server is started it
    parks in a long sleep loop to keep the runner alive.
    """
    health_app = web.Application()
    health_app.router.add_get("/live", live_handler)
    health_app.router.add_get("/ready", ready_handler)
    app_runner = web.AppRunner(health_app)
    await app_runner.setup()
    tcp_site = web.TCPSite(app_runner, "0.0.0.0", port)
    await tcp_site.start()
    # Keep the coroutine (and thus the server) alive indefinitely.
    while True:
        await asyncio.sleep(3600)
33 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
_targets := setup python-version version-python pyenv-info test cover lint lint.python lint.yaml run clean
.PHONY: help $(_targets)
.DEFAULT_GOAL := help

version-python: ## Echoes the version of Python in use
	python --version

help:
	@echo Targets: $(_targets)
	@false

setup:
	poetry install

python-version:
	@which python
	@python --version

pyenv-info: setup
	poetry env info

test: setup
	poetry run pytest

cover: setup
	poetry run pytest \
		--cov=pyth_observer \
		--cov-report=html \
		--cov-report=term

lint: setup lint.python lint.yaml

lint.python:
	poetry run isort pyth_observer/
	poetry run black pyth_observer/
	poetry run pyright pyth_observer/
	poetry run pyflakes pyth_observer/

lint.yaml:
	yamllint .

# The CLI has no -l/--network options; it requires the three config files
# (log level is controlled by the LOG_LEVEL environment variable).
run: setup
	poetry run pyth-observer \
		--config sample.config.yaml \
		--publishers sample.publishers.yaml \
		--coingecko-mapping sample.coingecko.yaml

clean:
	poetry env remove --all
	rm -rf htmlcov
--------------------------------------------------------------------------------
/tests/test_checks_price_feed.py:
--------------------------------------------------------------------------------
1 | from pythclient.market_schedule import MarketSchedule
2 | from pythclient.pythaccounts import PythPriceStatus
3 | from pythclient.solana import SolanaPublicKey
4 |
5 | from pyth_observer.check.price_feed import PriceFeedOfflineCheck, PriceFeedState
6 |
7 |
def test_price_feed_offline_check():
    # A TRADING feed whose last trading slot (105) is 5 slots away from the
    # latest block slot (100) — a small, non-abandoned gap.
    state = PriceFeedState(
        symbol="Crypto.BTC/USD",
        asset_type="Crypto",
        # Schedule string marks the market open (O) every day of the week.
        schedule=MarketSchedule("America/New_York;O,O,O,O,O,O,O;"),
        public_key=SolanaPublicKey("2hgu6Umyokvo8FfSDdMa9nDKhcdv9Q4VvGNhRCeSWeD3"),
        status=PythPriceStatus.TRADING,
        latest_block_slot=100,
        latest_trading_slot=105,
        price_aggregate=1000.0,
        confidence_interval_aggregate=10.0,
        coingecko_price=1005.0,
        coingecko_update=0,
    )

    # The 5-slot gap is within max_slot_distance=10, so the check passes...
    assert PriceFeedOfflineCheck(
        state, {"max_slot_distance": 10, "abandoned_slot_distance": 100}
    ).run()
    # ...but exceeds max_slot_distance=2, so the check fails.
    assert not PriceFeedOfflineCheck(
        state, {"max_slot_distance": 2, "abandoned_slot_distance": 100}
    ).run()
29 |
--------------------------------------------------------------------------------
/pyth_observer/coingecko.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from loguru import logger
4 | from pycoingecko import CoinGeckoAPI
5 | from requests.exceptions import HTTPError
6 | from throttler import throttle
7 |
8 |
# CoinGecko free API limit: 10-50 (varies) https://www.coingecko.com/en/api/pricing
# However prices are updated every 1-10 minutes: https://www.coingecko.com/en/faq
# The throttler below allows at most one call per `period` seconds
# (currently one call every 10s), which stays within the free-tier limit.
@throttle(rate_limit=1, period=10)
async def get_coingecko_prices(
    symbol_to_ticker: Dict[str, str],
) -> Dict[str, Dict[str, Any]]:
    """
    Fetch USD prices (with last-updated timestamps) for the given symbols.

    Args:
        symbol_to_ticker: Mapping of Pyth symbol -> CoinGecko coin id.
            NOTE(review): if two symbols map to the same coin id, the
            inversion below keeps only one symbol — confirm ids are unique.

    Returns:
        Mapping of Pyth symbol -> CoinGecko price entry, empty on API failure.
    """
    ticker_to_symbol = {v: k for k, v in symbol_to_ticker.items()}
    ids = list(ticker_to_symbol.keys())

    try:
        # NOTE(review): get_price() is a synchronous (requests-based) call
        # inside an async function, so it blocks the event loop for the
        # duration of the HTTP request — confirm this is acceptable here.
        prices = CoinGeckoAPI().get_price(
            ids=ids, vs_currencies="usd", include_last_updated_at=True
        )
    except (ValueError, HTTPError) as exc:
        logger.exception(exc)
        logger.error(
            "CoinGecko API call failed - CoinGecko price comparisons not available."
        )
        # Degrade gracefully: callers simply get no CoinGecko comparisons.
        prices = {}

    return {ticker_to_symbol[x]: prices[x] for x in prices}
31 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.mypy]
2 | python_version = "3.11"
3 | ignore_missing_imports = true
4 |
5 | [tool.poetry]
6 | name = "pyth-observer"
7 | version = "3.0.2"
8 | description = "Alerts and stuff"
9 | authors = []
10 | readme = "README.md"
11 | packages = [{ include = "pyth_observer" }]
12 |
13 | [tool.poetry.dependencies]
14 | python = "^3.10"
15 | aiohttp = "<4.0.0a1"
16 | arrow = "^1.2.3"
17 | base58 = "^2.1.1"
18 | click = "^8.1.3"
19 | datadog-api-client = { extras = ["async"], version = "^2.5.0" }
20 | loguru = "^0.6.0"
21 | more-itertools = "^9.0.0"
22 | prometheus-client = "0.15.0"
23 | pycoingecko = "2.2.0"
24 | pythclient = "^0.2.1"
25 | pyyaml = "^6.0"
26 | throttler = "1.2.1"
27 | types-pyyaml = "^6.0.12"
28 | types-pytz = "^2022.4.0.0"
29 | python-dotenv = "^1.0.1"
30 | numpy = "^2.1.3"
31 | cffi = "^1.17"
32 |
33 |
34 | [tool.poetry.group.dev.dependencies]
35 | black = "^22.10.0"
36 | ipdb = "^0.13.9"
37 | isort = "^5.10.1"
38 | pyflakes = "^2.5.0"
39 | pyright = "^1.1.278"
40 | pytest = "^7.1.3"
41 | pytest-asyncio = "^0.19.0"
42 | pytest-cov = "^4.0.0"
43 | pytest-mock = "^3.10.0"
44 |
45 | [tool.poetry.scripts]
46 | pyth-observer = "pyth_observer.cli:run"
47 |
48 | [build-system]
49 | requires = ["poetry-core"]
50 | build-backend = "poetry.core.masonry.api"
51 |
52 | [tool.poetry.requires-plugins]
53 | poetry-plugin-export = ">=1.8"
54 |
--------------------------------------------------------------------------------
/.github/workflows/build-and-push-image.yaml:
--------------------------------------------------------------------------------
1 | name: Docker Image Build
2 | on:
3 | push:
4 | tags:
5 | - v*
6 | pull_request:
7 | branches: ["main"]
8 |
9 | env:
10 | REGISTRY: ghcr.io
11 | IMAGE_NAME: ${{ github.repository }}
12 |
13 | jobs:
14 | pre-commit:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v2
18 | - uses: ./.github/actions/python-poetry
19 | - uses: pre-commit/action@v3.0.1
20 |
21 | run-tests:
22 | runs-on: ubuntu-latest
23 | steps:
24 | - uses: actions/checkout@v2
25 | - uses: ./.github/actions/python-poetry
26 | - run: poetry run pytest
27 | env:
28 | TEST_MODE: "1"
29 |
30 | build-and-push:
31 | permissions:
32 | contents: read
33 | packages: write
34 | runs-on: ubuntu-latest
35 | needs: [pre-commit, run-tests]
36 | steps:
37 | - name: Checkout repo
38 | uses: actions/checkout@v3
39 |
40 | # Set image tag to git tag, or commit hash for pull request
41 | - name: Set IMAGE_TAG
42 | run: |
43 | if [ "${{ github.event_name }}" == "push" ]; then
44 | echo "IMAGE_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
45 | else
46 | echo "IMAGE_TAG=${{ github.sha }}" >> $GITHUB_ENV
47 | fi
48 |
49 | - name: Log in to the Container registry
50 | uses: docker/login-action@v3
51 | with:
52 | registry: ${{ env.REGISTRY }}
53 | username: ${{ github.actor }}
54 | password: ${{ secrets.GITHUB_TOKEN }}
55 |
56 | - name: Extract metadata (tags, labels) for Docker
57 | id: meta
58 | uses: docker/metadata-action@v5
59 | with:
60 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
61 | tags: |
62 | type=raw,value=${{ env.IMAGE_TAG }}
63 | type=raw,value=${{ github.sha }}
64 |
65 | - name: Build and push Docker image
66 | uses: docker/build-push-action@v5
67 | with:
68 | context: .
69 | push: true
70 | tags: ${{ steps.meta.outputs.tags }}
71 | labels: ${{ steps.meta.outputs.labels }}
72 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Reference: https://bmaingret.github.io/blog/2021-11-15-Docker-and-Poetry#multi-stage-build

ARG APP_NAME=pyth-observer
ARG APP_PACKAGE=pyth_observer
ARG APP_PATH=/opt/$APP_NAME
ARG PYTHON_VERSION=3.11
ARG POETRY_VERSION=2.1.4

#
# Stage: base
#

FROM python:$PYTHON_VERSION AS base

ARG APP_NAME
ARG APP_PATH
ARG POETRY_VERSION

ENV \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONFAULTHANDLER=1
ENV \
    POETRY_VERSION=$POETRY_VERSION \
    POETRY_HOME="/opt/poetry" \
    POETRY_VIRTUALENVS_IN_PROJECT=true \
    POETRY_NO_INTERACTION=1

# Install Poetry - respects $POETRY_VERSION & $POETRY_HOME
RUN curl -sSL https://install.python-poetry.org | python - --version $POETRY_VERSION
ENV PATH="$POETRY_HOME/bin:$PATH"
RUN which poetry && poetry --version

WORKDIR $APP_PATH
COPY . .

#
# Stage: development
#

FROM base AS development

ARG APP_NAME
ARG APP_PATH

WORKDIR $APP_PATH
RUN poetry install

ENV APP_NAME=$APP_NAME

ENTRYPOINT ["poetry", "run"]
# Exec-form CMD performs no variable expansion, so the previous
# CMD ["$APP_NAME"] passed the literal string '$APP_NAME' to `poetry run`.
# Route through a shell that expands $APP_NAME (persisted via ENV above).
CMD ["sh", "-c", "exec $APP_NAME"]

#
# Stage: build
#

FROM base AS build

ARG APP_NAME
ARG APP_PATH

WORKDIR $APP_PATH
RUN poetry build --format wheel
RUN poetry self add poetry-plugin-export
RUN poetry export --format requirements.txt --output constraints.txt --without-hashes

#
# Stage: production
#

FROM python:$PYTHON_VERSION AS production

ARG APP_NAME
ARG APP_PATH

# ARG values are build-time only; persist the app name so the entrypoint's
# `eval "exec $@"` can expand the "$APP_NAME" CMD below at run time
# (without this ENV the container started with an empty command).
ENV APP_NAME=$APP_NAME

ENV \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONFAULTHANDLER=1

ENV \
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100

# Get build artifact wheel and install it respecting dependency versions
WORKDIR $APP_PATH
COPY --from=build $APP_PATH/dist/*.whl ./
COPY --from=build $APP_PATH/constraints.txt ./
RUN pip install ./*.whl --requirement constraints.txt

COPY ./entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]
CMD ["$APP_NAME"]
--------------------------------------------------------------------------------
/pyth_observer/zenduty.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import hashlib
3 | import os
4 | from typing import Optional
5 |
6 | import aiohttp
7 | from loguru import logger
8 |
9 | headers = {"Content-Type": "application/json"}
10 |
11 |
async def send_zenduty_alert(
    alert_identifier: str,
    message: str,
    resolved: bool = False,
    summary: str = "",
) -> Optional[aiohttp.ClientResponse]:
    """Post an alert (or its resolution) to the Zenduty events API.

    Retries up to five times with capped exponential backoff on HTTP 429;
    returns the final response, whatever its status.
    """
    url = f"https://events.zenduty.com/api/events/{os.environ['ZENDUTY_INTEGRATION_KEY']}/"
    # Zenduty deduplicates on entity_id, so derive a stable id from the
    # alert identifier; keep the first 32 hex chars due to the API's
    # length limit on that field.
    entity_id = hashlib.sha256(alert_identifier.encode("utf-8")).hexdigest()[:32]

    payload = {
        "alert_type": "resolved" if resolved else "critical",
        "message": message,
        "summary": summary,
        "entity_id": entity_id,
    }

    max_retries = 5
    async with aiohttp.ClientSession() as session:
        for attempt in range(1, max_retries + 1):
            async with session.post(url, json=payload, headers=headers) as response:
                # Any 2xx means Zenduty accepted the event.
                if 200 <= response.status < 300:
                    return response
                if response.status != 429:
                    # Non-retryable failure: log the body and give up.
                    response_text = await response.text()
                    logger.error(
                        f"{response.status} Failed to send Zenduty event message for {alert_identifier}: {response_text}"
                    )
                    return response
                # Rate limited: back off and retry, unless out of attempts.
                if attempt == max_retries:
                    logger.error(
                        f"Failed to send Zenduty event message for {alert_identifier} after {max_retries} retries."
                    )
                    return response
                sleeptime = min(30, 2**attempt)
                logger.error(
                    f"Received 429 Too Many Requests for {alert_identifier}. Retrying in {sleeptime} s..."
                )
                await asyncio.sleep(sleeptime)
60 |
--------------------------------------------------------------------------------
/pyth_observer/cli.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | import sys
4 | from typing import Any, Dict
5 |
6 | import click
7 | import yaml
8 | from loguru import logger
9 | from prometheus_client import start_http_server
10 |
11 | from pyth_observer import Observer, Publisher
12 | from pyth_observer.health_server import start_health_server
13 | from pyth_observer.models import ContactInfo
14 |
15 |
@click.command()
@click.option(
    "--config",
    help="Path to YAML/JSON file with general config",
    envvar="CONFIG",
    required=True,
)
@click.option(
    "--publishers",
    help="Path to YAML/JSON file with publisher name-key associations",
    envvar="PUBLISHERS",
    required=True,
)
@click.option(
    "--coingecko-mapping",
    help="Path to YAML/JSON file with Coingecko mappings",
    envvar="COINGECKO_MAPPING",
    required=True,
)
@click.option(
    "--prometheus-port",
    help="Port number for Prometheus metrics endpoint",
    envvar="PROMETHEUS_PORT",
    default="9001",
)
def run(
    config: str, publishers: str, coingecko_mapping: str, prometheus_port: str
) -> None:
    """
    Load the config files, start the Prometheus metrics endpoint and the
    health server, then run the observer loop until interrupted.
    """
    # Use context managers so file handles are closed deterministically
    # instead of relying on garbage collection.
    with open(config, "r") as config_file:
        config_: Dict[str, Any] = yaml.safe_load(config_file)  # type: ignore[assignment]

    # Load publishers YAML file and convert to dictionary of Publisher instances
    with open(publishers, "r") as publishers_file:
        publishers_raw: list[Dict[str, Any]] = yaml.safe_load(publishers_file)  # type: ignore[assignment]

    publishers_: Dict[str, Publisher] = {
        publisher["key"]: Publisher(
            key=publisher["key"],
            name=publisher["name"],
            # Truthiness check (not `in`) so an explicitly empty
            # `contact_info:` key in the YAML yields None instead of
            # crashing on ContactInfo(**None).
            contact_info=(
                ContactInfo(**publisher["contact_info"])
                if publisher.get("contact_info")
                else None
            ),
        )
        for publisher in publishers_raw
    }

    with open(coingecko_mapping, "r") as coingecko_file:
        coingecko_mapping_: Dict[str, Any] = yaml.safe_load(coingecko_file)  # type: ignore[assignment]

    observer = Observer(
        config_,
        publishers_,
        coingecko_mapping_,
    )

    start_http_server(int(prometheus_port))

    async def main() -> None:
        # Run the health server in the background alongside the observer.
        asyncio.create_task(start_health_server())
        await observer.run()

    asyncio.run(main())
73 |
74 |
# Configure loguru at import time: drop the default sink and log to stdout,
# serialized as JSON unless DEV_MODE is set (human-readable output for local
# development). The level comes from LOG_LEVEL (default "INFO").
logger.remove()
logger.add(
    sys.stdout,
    serialize=(not os.environ.get("DEV_MODE")),
    level=os.environ.get("LOG_LEVEL", "INFO"),
)
81 |
--------------------------------------------------------------------------------
/AGENTS.md:
--------------------------------------------------------------------------------
1 | # Repository Guidelines
2 |
3 | ## Project Structure & Modules
4 | - Core code lives in `pyth_observer/`: CLI entrypoint in `cli.py`, check logic under `check/`, alert dispatchers in `dispatch.py`, event types in `event.py`, and HTTP probes in `health_server.py`.
5 | - Supporting assets and defaults: sample configs (`sample.config.yaml`, `sample.publishers.yaml`, `sample.coingecko.yaml`), Dockerfile for container builds, and helper scripts in `scripts/` (e.g., `build_coingecko_mapping.py`).
6 | - Tests are in `tests/` and mirror module names (`test_checks_price_feed.py`, `test_checks_publisher.py`).
7 |
8 | ## Setup, Build & Run
9 | - Use Python 3.11 with Poetry 2.x. Suggested bootstrap: `poetry env use $(which python)` then `poetry install`.
10 | - Common Make targets: `make setup` (install deps), `make run` (devnet run), `make test`, `make cover`, `make lint`, `make clean`.
- Direct commands: `poetry run pyth-observer --config sample.config.yaml --publishers sample.publishers.yaml --coingecko-mapping sample.coingecko.yaml` to run locally; set `LOG_LEVEL=DEBUG` for verbose logs.
12 | - CoinGecko mapping: `poetry run python scripts/build_coingecko_mapping.py -o my_mapping.json` and compare with `-e sample.coingecko.yaml` before replacing defaults.
13 |
14 | ## Testing Guidelines
15 | - Framework: `pytest`. Quick check with `poetry run pytest`; coverage report via `make cover` (writes `htmlcov/`).
16 | - Keep tests colocated under `tests/` with `test_*` naming. Prefer async tests for async code paths and mock network calls.
17 | - Add regression tests alongside new checks or dispatch paths; include sample config fragments when useful.
18 |
19 | ## Coding Style & Naming
20 | - Auto-format with `black` and import order via `isort` (run together with `make lint`). Lint also runs `pyright` and `pyflakes`.
21 | - Target Python 3.11; favor type hints on public functions and dataclasses/models. Use snake_case for functions/variables, PascalCase for classes, and uppercase for constants.
22 | - Keep config keys consistent with existing YAML samples; avoid hard-coding secrets—read from env vars.
23 |
24 | ## Commit & PR Practices
25 | - Follow the existing Conventional Commit style (`fix:`, `chore:`, `refactor!:`, etc.) seen in `git log`.
26 | - PRs should summarize behavior changes, link issues, and include reproduction or validation steps (commands run, configs used). Add screenshots only when output formatting changes.
27 | - Keep diffs small and focused; update sample config or docs when user-facing options change.
28 |
29 | ## Configuration & Security Notes
30 | - Sensitive values (API keys, tokens) must be supplied via environment variables; never commit them. Use `.env` locally and document new keys in `README.md`.
31 | - For deployments, wire liveness/readiness probes to `GET /live` and `GET /ready` on port 8080.
32 |
--------------------------------------------------------------------------------
/sample.config.yaml:
--------------------------------------------------------------------------------
1 | network:
2 | name: "pythnet"
3 | http_endpoint: "https://api2.pythnet.pyth.network"
4 | ws_endpoint: "wss://api2.pythnet.pyth.network"
5 | first_mapping: "AHtgzX45WTKfkPG53L6WYhGEXwQkN1BVknET3sVsLL8J"
6 | request_rate_limit: 10
7 | request_rate_period: 1
8 | events:
9 | - LogEvent
10 | # - DatadogEvent
11 | # - TelegramEvent
12 | # - ZendutyEvent
13 | # Alert thresholds apply to Zenduty and Telegram events
14 | # - Checks run approximately once per minute
15 | # - `alert_threshold`: number of failures within 5 minutes >= to this value trigger an alert (default: 5)
16 | # - `resolution_threshold`: number of failures within 5 minutes <= this value resolve the alert (default: 3)
17 | checks:
18 | global:
19 | # Price feed checks
20 | PriceFeedOfflineCheck:
21 | enable: true
22 | max_slot_distance: 120
23 | abandoned_slot_distance: 100000
24 | alert_threshold: 3
25 | resolution_threshold: 0
26 | PriceFeedCoinGeckoCheck:
27 | enable: true
28 | max_deviation: 5
29 | max_staleness: 60
30 | PriceFeedConfidenceIntervalCheck:
31 | enable: false
32 | # Publisher checks
33 | PublisherWithinAggregateConfidenceCheck:
34 | enable: false
35 | max_interval_distance: 20
36 | PublisherConfidenceIntervalCheck:
37 | enable: false
38 | min_confidence_interval: 0
39 | PublisherOfflineCheck:
40 | enable: false
41 | max_slot_distance: 25
42 | abandoned_slot_distance: 10000
43 | PublisherPriceCheck:
44 | enable: true
45 | max_slot_distance: 25
46 | max_aggregate_distance: 5
47 | alert_threshold: 2
48 | resolution_threshold: 1
49 | PublisherStalledCheck:
50 | enable: false
51 | stall_time_limit: 120
52 | abandoned_time_limit: 300
53 | max_slot_distance: 25
54 | noise_threshold: 0.0001
55 | min_noise_samples: 10
56 | alert_threshold: 1
57 | resolution_threshold: 0
58 | # Per-symbol config
59 | Crypto.ANC/USD:
60 | PublisherPriceCheck:
61 | enable: true
62 | max_slot_distance: 25
63 | max_aggregate_distance: 50
64 | Crypto.MIR/USD:
65 | PublisherPriceCheck:
66 | enable: true
67 | max_slot_distance: 25
68 | max_aggregate_distance: 25
69 | Crypto.MNGO/USD:
70 | PriceFeedOfflineCheck:
71 | max_slot_distance: 100000
72 | Crypto.SLND/USD:
73 | PriceFeedOfflineCheck:
74 | max_slot_distance: 100000
75 | Crypto.SNY/USD:
76 | PriceFeedOfflineCheck:
77 | max_slot_distance: 100000
78 | Crypto.PORT/USD:
79 | PriceFeedOfflineCheck:
80 | max_slot_distance: 100000
81 | FX.USD/HKD:
82 | PriceFeedOfflineCheck:
83 | max_slot_distance: 10000
84 | Crypto.ZBC/USD:
85 | PublisherPriceCheck:
86 | max_aggregate_distance: 30
87 | Crypto.BTC/USD:
88 | PublisherStalledCheck:
89 | enable: true
90 | stall_time_limit: 300 # This will override the global stall_time_limit for Crypto.BTC/USD
91 | abandoned_time_limit: 600 # This will override the global abandoned_time_limit for Crypto.BTC/USD
92 | max_slot_distance: 25
93 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pyth Observer
2 |
3 | Observe Pyth on-chain price feeds and run sanity checks on the data.
4 |
5 | ## Usage
6 |
7 | Container images are available at https://github.com/pyth-network/pyth-observer/pkgs/container/pyth-observer
8 |
9 | To run Observer locally, you will need:
10 | - Python 3.11 ([pyenv](https://github.com/pyenv/pyenv) is a nice way to manage Python installs, and once installed will automatically set the version to 3.11 for this project dir via the `.python-version` file).
- [Poetry](https://python-poetry.org) v2.1.4, which handles package and virtualenv management.
12 |
13 | Install dependencies and run the service:
14 | ```sh
15 | $ poetry env use $(which python) # point Poetry to the pyenv python shim
16 | $ poetry install
$ poetry run pyth-observer --config sample.config.yaml --publishers sample.publishers.yaml --coingecko-mapping sample.coingecko.yaml
18 | ```
19 |
20 | Use `poetry run pyth-observer --help` for documentation on arguments and environment variables.
21 |
22 | To run tests, use `poetry run pytest`.
23 |
24 | ## Building CoinGecko Mapping
25 |
26 | The `scripts/build_coingecko_mapping.py` script automatically generates a CoinGecko mapping file by fetching all price feeds from the Pyth Hermes API and matching them with CoinGecko's coin list using fuzzy matching.
27 |
28 | ### Basic Usage
29 |
30 | ```sh
31 | # Generate a new mapping file
32 | poetry run python scripts/build_coingecko_mapping.py
33 |
34 | # Compare with existing mapping file
35 | poetry run python scripts/build_coingecko_mapping.py -e sample.coingecko.yaml
36 |
37 | # Specify custom output file
38 | poetry run python scripts/build_coingecko_mapping.py -o my_mapping.json
39 |
40 | # Skip price validation (faster, but less thorough)
41 | poetry run python scripts/build_coingecko_mapping.py --no-validate-prices
42 |
43 | # Adjust maximum price deviation threshold (default: 10.0%)
44 | poetry run python scripts/build_coingecko_mapping.py --max-price-deviation 5.0
45 | ```
46 |
47 | ### How It Works
48 |
49 | 1. **Fetches Pyth Price Feeds**: Retrieves all price feeds from `https://hermes.pyth.network/v2/price_feeds`
50 | 2. **Extracts Crypto Symbols**: Filters for Crypto asset types and extracts symbols (e.g., "Crypto.BTC/USD")
51 | 3. **Matches with CoinGecko**: Uses multiple matching strategies:
52 | - Exact symbol match (case-insensitive)
53 | - Fuzzy symbol matching
54 | - Fuzzy name matching based on Pyth description
55 | 4. **Validates Mappings**: Compares generated mappings against known correct mappings
56 | 5. **Validates Prices** (optional): Compares prices from Hermes and CoinGecko to detect mismatches
57 | 6. **Generates Warnings**: Flags symbols that need manual review:
58 | - Low-confidence fuzzy matches (shows similarity score)
59 | - Symbols with no matches found
60 | - Price deviations between sources
61 |
62 | ### Output
63 |
64 | The script generates a JSON file in the format:
65 | ```json
66 | {
67 | "Crypto.BTC/USD": "bitcoin",
68 | "Crypto.ETH/USD": "ethereum",
69 | ...
70 | }
71 | ```
72 |
73 | The script provides a summary showing:
74 | - Total symbols mapped
75 | - Exact matches (100% confidence)
76 | - Fuzzy matches (needs review)
77 | - No matches found
78 |
79 | Review the warnings output to manually verify and adjust any low-confidence matches before using the generated mapping file.
80 |
81 | ## Configuration
82 |
83 | See `sample.config.yaml` for configuration options.
84 |
85 | Event types are configured via environment variables:
86 |
87 | - `DatadogEvent`
88 |
89 | - `DATADOG_EVENT_SITE` - Division where Datadog account is registered
90 | - `DATADOG_EVENT_API_KEY` - API key used to send requests to Datadog API
91 |
92 | - `LogEvent`
93 | - `LOG_EVENT_LEVEL` - Level to log messages at
94 |
95 | - `TelegramEvent`
96 | - `TELEGRAM_BOT_TOKEN` - API token for the Telegram bot
97 | - `OPEN_ALERTS_FILE` - Path to local file used for persisting open alerts
98 |
99 | - `ZendutyEvent`
100 | - `ZENDUTY_INTEGRATION_KEY` - Integration key for Zenduty service API integration
101 | - `OPEN_ALERTS_FILE` - Path to local file used for persisting open alerts
102 |
103 | ### Alert Thresholds
- Alert thresholds apply to ZendutyEvent and TelegramEvent (alert resolution applies only to Zenduty)
105 | - Checks run approximately once per minute.
106 | - These thresholds can be overridden per check type in config.yaml
- `alert_threshold`: an alert is triggered when the number of failures in 5 minutes is greater than or equal to this value (default: 5)
- `resolution_threshold`: an alert is resolved when the number of failures in 5 minutes is less than or equal to this value (default: 3)
109 |
110 | ## Finding the Telegram Group Chat ID
111 |
112 | To integrate Telegram events with the Observer, you need the Telegram group chat ID. Here's how you can find it:
113 |
114 | 1. Open [Telegram Web](https://web.telegram.org).
115 | 2. Navigate to the group chat for which you need the ID.
116 | 3. Look at the URL in the browser's address bar; it should look something like `https://web.telegram.org/a/#-1111111111`.
117 | 4. The group chat ID is the number in the URL, including the `-` sign if present (e.g., `-1111111111`).
118 |
119 | Use this ID in the `publishers.yaml` configuration to correctly set up Telegram events.
120 |
121 | ## Health Endpoints
122 |
123 | The Observer exposes HTTP endpoints for health checks, suitable for Kubernetes liveness and readiness probes:
124 |
125 | - **Liveness probe**: `GET /live` always returns `200 OK` with body `OK`.
126 | - **Readiness probe**: `GET /ready` returns `200 OK` with body `OK` if the observer is ready, otherwise returns `503 Not Ready`.
127 |
128 | By default, these endpoints are served on port 8080. You can use them in your Kubernetes deployment to monitor the application's health.
129 |
--------------------------------------------------------------------------------
/pyth_observer/check/price_feed.py:
--------------------------------------------------------------------------------
1 | import time
2 | from dataclasses import dataclass
3 | from datetime import datetime
4 | from typing import Any, Dict, Optional, Protocol, runtime_checkable
5 | from zoneinfo import ZoneInfo
6 |
7 | from pythclient.market_schedule import MarketSchedule
8 | from pythclient.pythaccounts import PythPriceStatus
9 | from pythclient.solana import SolanaPublicKey
10 |
11 |
@dataclass
class PriceFeedState:
    """Snapshot of a single Pyth price feed, used as input to price-feed checks.

    Built once per observation cycle from on-chain account data plus the
    latest CoinGecko quote (when a mapping exists for the symbol).
    """

    # Feed symbol, e.g. "Crypto.BTC/USD"
    symbol: str
    # Asset class, e.g. "Crypto"
    asset_type: str
    # Trading-hours schedule; checks are skipped while the market is closed
    schedule: MarketSchedule
    # Public key of the price account
    public_key: SolanaPublicKey
    # Aggregate price status (e.g. TRADING)
    status: PythPriceStatus
    # Solana slot at the time the price account was fetched
    latest_block_slot: int
    # Slot of the feed's last trading update
    latest_trading_slot: int
    price_aggregate: float
    confidence_interval_aggregate: float
    # Reference price from CoinGecko, if available for this symbol
    coingecko_price: Optional[float]
    # Unix timestamp of the CoinGecko quote, if available
    coingecko_update: Optional[int]
25 |
26 |
# Per-check configuration values parsed from the config file (thresholds, flags).
PriceFeedCheckConfig = Dict[str, str | float | int | bool]
28 |
29 |
@runtime_checkable
class PriceFeedCheck(Protocol):
    """Structural interface implemented by all price-feed checks."""

    def __init__(self, state: PriceFeedState, config: PriceFeedCheckConfig) -> None:
        ...

    def state(self) -> PriceFeedState:
        """Return the feed state this check instance was built from."""
        ...

    def run(self) -> bool:
        """Return True when the check passes (or is skipped), False on failure."""
        ...

    def error_message(self) -> Dict[str, Any]:
        """Return structured failure details, including a "msg" key."""
        ...
43 |
44 |
class PriceFeedOfflineCheck(PriceFeedCheck):
    """Flags feeds whose aggregate price stopped updating during market hours.

    Feeds stale for longer than the "abandoned" threshold are deliberately
    ignored, so long-dead feeds do not keep alerting forever.
    """

    def __init__(self, state: PriceFeedState, config: PriceFeedCheckConfig) -> None:
        self._state = state
        self._max_slot_distance: int = int(config["max_slot_distance"])
        self._abandoned_slot_distance: int = int(config["abandoned_slot_distance"])

    def state(self) -> PriceFeedState:
        return self._state

    def run(self) -> bool:
        """Return True when the feed is fresh, its market is closed, or it is abandoned."""
        now_ny = datetime.now(ZoneInfo("America/New_York"))

        # Nothing to verify while the market for this symbol is closed.
        if not self._state.schedule.is_market_open(now_ny):
            return True

        slots_behind = abs(
            self._state.latest_block_slot - self._state.latest_trading_slot
        )

        recently_updated = slots_behind < self._max_slot_distance
        abandoned = slots_behind > self._abandoned_slot_distance

        # Fail only in the window between "recently updated" and "abandoned".
        return recently_updated or abandoned

    def error_message(self) -> Dict[str, Any]:
        slots_behind = self._state.latest_block_slot - self._state.latest_trading_slot
        return {
            "msg": f"{self._state.symbol} is offline (either non-trading/stale). Last update {slots_behind} slots ago.",
            "type": "PriceFeedOfflineCheck",
            "symbol": self._state.symbol,
            "latest_trading_slot": self._state.latest_trading_slot,
            "block_slot": self._state.latest_block_slot,
        }
87 |
88 |
class PriceFeedCoinGeckoCheck(PriceFeedCheck):
    """Checks that the Pyth aggregate price stays close to CoinGecko's price.

    The check is skipped whenever no fresh CoinGecko reference is available
    or the feed is not currently trading.
    """

    def __init__(self, state: PriceFeedState, config: PriceFeedCheckConfig) -> None:
        self.__state = state
        self.__max_deviation: float = float(config["max_deviation"])  # Percentage
        self.__max_staleness: int = int(config["max_staleness"])  # Seconds

    def state(self) -> PriceFeedState:
        return self.__state

    def run(self) -> bool:
        """Return True if the price agrees with CoinGecko or cannot be compared."""
        # Skip if no CoinGecko price
        if not self.__state.coingecko_price or not self.__state.coingecko_update:
            return True

        # Skip if stale CoinGecko price
        if self.__state.coingecko_update + self.__max_staleness < time.time():
            return True

        # Skip if not trading
        if self.__state.status != PythPriceStatus.TRADING:
            return True

        # Skip if CoinGecko price is zero
        if self.__state.coingecko_price == 0:
            return True

        # Relative deviation expressed as a percentage, to match the
        # `max_deviation` config value. (Previously this was left as a
        # fraction, so the configured percentage threshold effectively
        # never triggered.)
        deviation = (
            abs(self.__state.price_aggregate - self.__state.coingecko_price)
            / self.__state.coingecko_price
        ) * 100

        # Pass if deviation is less than max deviation
        if deviation < self.__max_deviation:
            return True

        # Fail
        return False

    def error_message(self) -> Dict[str, Any]:
        return {
            "msg": f"{self.__state.symbol} is too far from Coingecko's price.",
            "type": "PriceFeedCoinGeckoCheck",
            "symbol": self.__state.symbol,
            "pyth_price": self.__state.price_aggregate,
            "coingecko_price": self.__state.coingecko_price,
        }
135 |
136 |
class PriceFeedConfidenceIntervalCheck(PriceFeedCheck):
    """Verifies that a trading feed reports a large-enough confidence interval."""

    def __init__(self, state: PriceFeedState, config: PriceFeedCheckConfig) -> None:
        self._state = state
        self._min_confidence_interval: int = int(config["min_confidence_interval"])

    def state(self) -> PriceFeedState:
        return self._state

    def run(self) -> bool:
        """Return True unless a trading feed's confidence interval is too small."""
        # Only trading feeds publish a meaningful confidence interval.
        if self._state.status != PythPriceStatus.TRADING:
            return True

        return (
            self._state.confidence_interval_aggregate > self._min_confidence_interval
        )

    def error_message(self) -> Dict[str, Any]:
        return {
            "msg": f"{self._state.symbol} confidence interval is too low.",
            "type": "PriceFeedConfidenceIntervalCheck",
            "symbol": self._state.symbol,
            "confidence_interval": self._state.confidence_interval_aggregate,
        }
164 |
165 |
# All price-feed check classes; instantiated per feed by the dispatcher.
PRICE_FEED_CHECKS = [
    PriceFeedCoinGeckoCheck,
    PriceFeedConfidenceIntervalCheck,
    PriceFeedOfflineCheck,
]
171 |
--------------------------------------------------------------------------------
/pyth_observer/check/stall_detection.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import List, Optional
3 |
4 | import numpy as np
5 |
6 | from pyth_observer.check.publisher import PriceUpdate
7 |
8 |
@dataclass
class StallDetectionResult:
    """Outcome of one stall-detection pass over a publisher's price series."""

    is_stalled: bool
    # 'exact' for identical values, 'noisy' for artificial noise
    stall_type: Optional[str]
    base_price: Optional[float]
    noise_magnitude: Optional[float]
    # How long the price has been stalled
    duration: float
    confidence: float

    @classmethod
    def no_stall(cls) -> "StallDetectionResult":
        """Build the canonical "healthy series" result: not stalled, full confidence."""
        # Fields: is_stalled, stall_type, base_price, noise_magnitude,
        # duration, confidence.
        return cls(False, None, None, None, 0.0, 1.0)
33 |
34 |
class StallDetector:
    """Detects stalled prices: exact repeats and static prices with added noise.

    Rationale: genuine price movement, even on very stable symbols, should
    eventually exceed a tiny relative threshold. If every sample stays within
    ``noise_threshold`` of the base price for longer than ``stall_time_limit``,
    the series is most likely a static price with artificial noise added.

    Detection logic:
    1. Exact stalls: the price is exactly equal (within float precision) for
       longer than ``stall_time_limit``.
    2. Noisy stalls: all variation stays inside the relative
       ``noise_threshold`` (default 1e-4, i.e. 0.01%) for longer than
       ``stall_time_limit``.

    The relative threshold is effective because:
    - Real movement, even on very stable symbols, should exceed it.
    - It is hard to circumvent: evading detection requires larger variations,
      hurting the publisher's price accuracy versus the aggregate.
    - Being relative to the base price, it works across price scales.
    - It works across noise patterns (random, sine wave, structured, etc.)

    Example: a $100 base price whose samples all stay within +/-$0.01 (0.01%)
    for 2+ minutes is likely stalled — natural movement would occasionally
    break out of such a tiny band.
    """

    def __init__(
        self,
        stall_time_limit: float,
        noise_threshold: float = 1e-4,
        min_noise_samples: int = 5,
    ) -> None:
        """
        Args:
            stall_time_limit: Seconds before a price is considered stalled.
            noise_threshold: Maximum relative noise magnitude (e.g. 1e-4 = 0.01%).
            min_noise_samples: Minimum number of cached updates required for
                noisy-stall detection (exact stalls need no minimum).
        """
        self.stall_time_limit = stall_time_limit
        self.noise_threshold = noise_threshold
        self.min_noise_samples = min_noise_samples

    def analyze_updates(
        self, updates: List[PriceUpdate], cur_update: PriceUpdate
    ) -> StallDetectionResult:
        """Classify the cached price series together with the current update.

        Assumes the cache was refreshed recently, since the newest cached
        timestamp stands in for "now".

        Args:
            updates: Cached price updates to analyze.
            cur_update: The update being processed right now. A repeated price
                is not re-cached, so it must be passed separately.

        Returns:
            StallDetectionResult describing whether and how the price stalled.
        """
        # Nothing cached yet: cannot judge with fewer than one sample.
        if not updates:
            return StallDetectionResult.no_stall()

        # Exact stall: the newest cached update plus the current one suffice.
        duration = cur_update.timestamp - updates[-1].timestamp
        if duration <= self.stall_time_limit:
            return StallDetectionResult.no_stall()
        if cur_update.price == updates[-1].price:
            return StallDetectionResult(
                is_stalled=True,
                stall_type="exact",
                base_price=cur_update.price,
                noise_magnitude=0.0,
                duration=duration,
                confidence=1.0,
            )

        # Noisy stall: measure each sample's deviation relative to the median.
        prices = np.asarray([u.price for u in updates])
        base_price = np.median(prices)

        if base_price == 0:
            # A zero base price would divide by zero below.
            return StallDetectionResult.no_stall()

        relative_deviations = np.abs(prices - base_price) / abs(base_price)
        max_relative_deviation = np.max(relative_deviations)

        if len(updates) < self.min_noise_samples:
            # Too few samples to tell noise from movement; pass for now.
            return StallDetectionResult.no_stall()

        if max_relative_deviation <= self.noise_threshold:
            # All variation fits inside the noise band: treat as stalled,
            # with confidence growing as the band usage shrinks.
            return StallDetectionResult(
                is_stalled=True,
                stall_type="noisy",
                base_price=float(base_price),
                noise_magnitude=float(max_relative_deviation * base_price),
                duration=duration,
                confidence=float(1.0 - max_relative_deviation / self.noise_threshold),
            )

        return StallDetectionResult(
            is_stalled=False,
            stall_type=None,
            base_price=float(base_price),
            noise_magnitude=float(max_relative_deviation * base_price),
            duration=duration,
            confidence=0.0,
        )
153 |
--------------------------------------------------------------------------------
/pyth_observer/event.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Dict, Protocol, TypedDict, cast
3 |
4 | import aiohttp
5 | from datadog_api_client.api_client import AsyncApiClient as DatadogAPI
6 | from datadog_api_client.configuration import Configuration as DatadogConfig
7 | from datadog_api_client.v1.api.events_api import EventsApi as DatadogEventAPI
8 | from datadog_api_client.v1.model.event_alert_type import EventAlertType
9 | from datadog_api_client.v1.model.event_create_request import EventCreateRequest
10 | from dotenv import load_dotenv
11 | from loguru import logger
12 |
13 | from pyth_observer.alert_utils import generate_alert_identifier
14 | from pyth_observer.check import Check
15 | from pyth_observer.check.price_feed import PriceFeedState
16 | from pyth_observer.check.publisher import PublisherCheck, PublisherState
17 | from pyth_observer.models import Publisher
18 | from pyth_observer.zenduty import send_zenduty_alert
19 |
20 | load_dotenv()
21 |
22 |
class Context(TypedDict):
    """Shared metadata passed to every event alongside the failed check."""

    # Network name, e.g. "mainnet"
    network: str
    # Publisher key -> Publisher metadata (name, contact info)
    publishers: Dict[str, Publisher]
26 |
27 |
class Event(Protocol):
    """Structural interface for alert-delivery backends.

    Implementations are constructed with the failed check plus the shared
    context, and deliver one notification when `send` is awaited.
    """

    check: Check
    context: Context

    async def send(self) -> None:
        ...
34 |
35 |
class DatadogEvent(Event):
    """Delivers a check failure to Datadog via its Events API.

    Reads `DATADOG_EVENT_SITE` and `DATADOG_EVENT_API_KEY` from the
    environment at send time.
    """

    def __init__(self, check: Check, context: Context) -> None:
        self.check = check
        self.context = context

    async def send(self) -> None:
        """Build and submit the event; raises RuntimeError if Datadog rejects it."""
        # Publisher checks expect the key -> name mapping of publishers when
        # generating the error title/message.
        event_content = self.check.error_message()
        event_title = event_content["msg"]
        event_text = ""
        for key, value in event_content.items():
            event_text += f"{key}: {value}\n"

        # An example is: PriceFeedOfflineCheck-Crypto.AAVE/USD
        aggregation_key = f"{self.check.__class__.__name__}-{self.check.state().symbol}"

        # NOTE(review): this matches only direct subclasses of PublisherCheck;
        # deeper inheritance would be missed — confirm that is intended.
        if self.check.__class__.__bases__ == (PublisherCheck,):
            # Add publisher key to the aggregation key to separate different faulty publishers
            # An example would be: PublisherPriceCheck-Crypto.AAVE/USD-9TvAYCUkGajRXs....
            aggregation_key += "-" + self.check.state().public_key.key

        event = EventCreateRequest(
            aggregation_key=aggregation_key,
            title=event_title,
            text=event_text,
            tags=[
                "service:observer",
                f"network:{self.context['network']}",
                f"symbol:{self.check.state().symbol}",
                f"check:{self.check.__class__.__name__}",
            ],
            alert_type=EventAlertType.WARNING,
            source_type_name="my_apps",
        )

        # Cast the event to EventCreateRequest explicitly because pyright complains that the previous line returns UnparsedObject | Unknown | None
        event = cast(EventCreateRequest, event)

        # This assumes that DD_API_KEY and DD_SITE env. variables are set. Also,
        # using the async API makes the events api return a coroutine, so we
        # ignore the pyright warning.

        server_variables = {"site": os.environ["DATADOG_EVENT_SITE"]}
        api_key = {"apiKeyAuth": os.environ["DATADOG_EVENT_API_KEY"]}
        config = DatadogConfig(api_key=api_key, server_variables=server_variables)

        async with DatadogAPI(config) as api:
            response = await DatadogEventAPI(api).create_event(
                body=event
            )  # pyright: ignore

            if response.status != "ok":
                raise RuntimeError(
                    f"Failed to send Datadog event (status: {response.status})"
                )
92 |
93 |
class LogEvent(Event):
    """Records a check failure to the structured log instead of an external service."""

    def __init__(self, check: Check, context: Context) -> None:
        self.check = check
        self.context = context

    async def send(self) -> None:
        """Log the check's "msg" with all error fields attached as log context."""
        details = self.check.error_message()
        with logger.contextualize(**details):
            logger.info(details["msg"])
105 |
106 |
class TelegramEvent(Event):
    """Sends publisher-check failures to the publisher's Telegram group chat."""

    def __init__(self, check: Check, context: Context) -> None:
        self.check = check
        self.context = context
        self.telegram_bot_token = os.environ["TELEGRAM_BOT_TOKEN"]

    async def send(self) -> None:
        """Deliver the alert via the Telegram Bot API (publisher checks only)."""
        # Only publisher checks carry a publisher whose chat can be notified.
        if self.check.__class__.__bases__ != (PublisherCheck,):
            return

        details = self.check.error_message()
        publisher_key = self.check.state().public_key.key
        publisher = self.context["publishers"].get(publisher_key, None)

        # Ensure publisher is not None and has contact_info before accessing
        # telegram_chat_id.
        chat_id = None
        if publisher is not None and publisher.contact_info is not None:
            chat_id = publisher.contact_info.telegram_chat_id

        if chat_id is None:
            logger.warning(
                f"Telegram chat ID not found for publisher key {publisher_key}"
            )
            return

        telegram_api_url = (
            f"https://api.telegram.org/bot{self.telegram_bot_token}/sendMessage"
        )

        # Render each error field as a bold "Key:" line in Markdown.
        lines = [
            f"*{key.capitalize().replace('_', ' ')}:* {value}\n"
            for key, value in details.items()
        ]
        message_data = {
            "chat_id": chat_id,
            "text": "".join(lines),
            "parse_mode": "Markdown",
        }

        async with aiohttp.ClientSession() as session:
            async with session.post(telegram_api_url, json=message_data) as response:
                if response.status != 200:
                    response_text = await response.text()
                    logger.error(f"Failed to send Telegram message: {response_text}")
156 |
157 |
class ZendutyEvent(Event):
    """Raises (or refreshes) a Zenduty alert for the failed check."""

    def __init__(self, check: Check, context: Context) -> None:
        self.check = check
        self.context = context

    async def send(self) -> None:
        """Compose an alert summary and forward it to Zenduty."""
        details = self.check.error_message()
        summary = "".join(f"{key}: {value}\n" for key, value in details.items())

        alert_identifier = generate_alert_identifier(self.check)
        state = self.check.state()

        # Append a deep link into the legacy Pyth dashboard for quick triage.
        if isinstance(state, PublisherState):
            symbol = state.symbol.replace(".", "-").replace("/", "-").lower()
            cluster = (
                "solana-mainnet-beta"
                if self.context["network"] == "mainnet"
                else self.context["network"]
            )
            publisher_key = state.public_key.key
            summary += f"https://legacy.pyth.network/metrics?price-feed={symbol}&cluster={cluster}&publisher={publisher_key}\n"
        elif isinstance(state, PriceFeedState):
            symbol = state.symbol.replace(".", "-").replace("/", "-").lower()
            summary += f"https://legacy.pyth.network/price-feeds/{symbol}\n"

        logger.debug(f"Sending Zenduty alert for {alert_identifier}")
        await send_zenduty_alert(
            alert_identifier=alert_identifier, message=alert_identifier, summary=summary
        )
192 |
--------------------------------------------------------------------------------
/pyth_observer/metrics.py:
--------------------------------------------------------------------------------
1 | import time
2 | from contextlib import contextmanager
3 | from typing import Any, Dict, Optional
4 |
5 | from prometheus_client import (
6 | REGISTRY,
7 | CollectorRegistry,
8 | Counter,
9 | Gauge,
10 | Histogram,
11 | Info,
12 | )
13 | from pythclient.pythaccounts import PythPriceStatus
14 |
15 | from pyth_observer.check.price_feed import PriceFeedState
16 |
17 |
class PythObserverMetrics:
    """Prometheus metrics for the observer: feed health, checks, alerts, APIs.

    All collectors are registered against the given registry (the global
    default registry unless overridden, e.g. for tests).
    """

    def __init__(self, registry: CollectorRegistry = REGISTRY):
        self.registry = registry

        self.observer_info = Info(
            "pyth_observer_info",
            "Information about the Pyth Observer instance",
            registry=registry,
        )

        # Check execution
        self.check_execution_duration = Histogram(
            "pyth_observer_check_execution_duration_seconds",
            "Time spent executing checks",
            ["check_type"],
            buckets=[0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 30.0],
            registry=registry,
        )

        self.loop_errors_total = Counter(
            "pyth_observer_loop_errors_total",
            "Total number of errors in observation loop",
            ["error_type"],
            registry=registry,
        )

        # Price feeds
        self.price_feeds_processed = Gauge(
            "pyth_observer_price_feeds_processed_total",
            "Total number of price feeds processed in last cycle",
            registry=registry,
        )

        self.price_feed_status = Gauge(
            "pyth_observer_price_feed_status",
            "Status of price feeds (1=trading, 0=not trading)",
            ["symbol", "asset_type"],
            registry=registry,
        )

        self.price_feed_staleness = Gauge(
            "pyth_observer_price_feed_staleness_slots",
            "Number of slots since last price update",
            ["symbol", "asset_type"],
            registry=registry,
        )

        self.price_feed_confidence_interval = Gauge(
            "pyth_observer_price_feed_confidence_interval",
            "Price feed confidence interval",
            ["symbol", "asset_type"],
            registry=registry,
        )

        self.check_success_rate = Gauge(
            "pyth_observer_check_success_rate",
            "Success rate of checks (0-1)",
            ["check_type", "symbol"],
            registry=registry,
        )

        # CoinGecko comparison
        self.price_deviation_from_coingecko = Gauge(
            "pyth_observer_price_deviation_from_coingecko_percent",
            "Price deviation from CoinGecko as percentage",
            ["symbol"],
            registry=registry,
        )

        self.coingecko_price_age = Gauge(
            "pyth_observer_coingecko_price_age_seconds",
            "Age of CoinGecko price data in seconds",
            ["symbol"],
            registry=registry,
        )

        self.publishers_active = Gauge(
            "pyth_observer_publishers_active_total",
            "Number of active publishers for a symbol",
            ["symbol", "asset_type"],
            registry=registry,
        )

        # Alerting
        self.alerts_active = Gauge(
            "pyth_observer_alerts_active_total",
            "Number of currently active alerts",
            ["alert_type"],
            registry=registry,
        )

        self.alerts_sent_total = Counter(
            "pyth_observer_alerts_sent_total",
            "Total number of alerts sent",
            ["alert_type", "channel"],
            registry=registry,
        )

        # External API usage
        self.api_request_duration = Histogram(
            "pyth_observer_api_request_duration_seconds",
            "Duration of external API requests",
            ["service", "endpoint"],
            buckets=[0.1, 0.5, 1.0, 2.0, 5.0, 10.0],
            registry=registry,
        )

        self.api_request_total = Counter(
            "pyth_observer_api_requests_total",
            "Total number of API requests",
            ["service", "endpoint", "status"],
            registry=registry,
        )

        self.api_rate_limit_hits = Counter(
            "pyth_observer_api_rate_limit_hits_total",
            "Number of times rate limits were hit",
            ["service"],
            registry=registry,
        )

        # Network connectivity
        self.latest_block_slot = Gauge(
            "pyth_observer_latest_block_slot",
            "Latest Solana block slot observed",
            registry=registry,
        )

        self.network_connection_status = Gauge(
            "pyth_observer_network_connection_status",
            "Network connection status (1=connected, 0=disconnected)",
            ["network", "endpoint_type"],
            registry=registry,
        )

    def set_observer_info(self, network: str, config: Dict[str, Any]):
        """Set static information about the observer instance."""
        # Count globally-enabled checks; tolerate a missing "checks" section.
        global_checks = config.get("checks", {}).get("global", {})
        enabled_checks = sum(
            1
            for check_config in global_checks.values()
            if check_config.get("enable", False)
        )
        self.observer_info.info(
            {
                "network": network,
                "checks_enabled": str(enabled_checks),
                "event_handlers": ",".join(config.get("events", [])),
            }
        )

    @contextmanager
    def time_operation(self, metric: Histogram, **labels):
        """Context manager that records elapsed wall time into `metric`."""
        start_time = time.time()
        try:
            yield
        finally:
            duration = time.time() - start_time
            metric.labels(**labels).observe(duration)

    def update_price_feed_metrics(self, state: PriceFeedState) -> None:
        """Refresh all per-feed gauges from a freshly built PriceFeedState."""
        labels = {"symbol": state.symbol, "asset_type": state.asset_type}

        status_value = 1 if state.status == PythPriceStatus.TRADING else 0
        self.price_feed_status.labels(**labels).set(status_value)

        staleness = state.latest_block_slot - state.latest_trading_slot
        self.price_feed_staleness.labels(**labels).set(staleness)

        self.price_feed_confidence_interval.labels(**labels).set(
            state.confidence_interval_aggregate
        )

        if state.coingecko_price:
            deviation = (
                abs(state.price_aggregate - state.coingecko_price)
                / state.coingecko_price
                * 100
            )
            self.price_deviation_from_coingecko.labels(symbol=state.symbol).set(
                deviation
            )

        if state.coingecko_update:
            age = time.time() - state.coingecko_update
            self.coingecko_price_age.labels(symbol=state.symbol).set(age)

        self.latest_block_slot.set(state.latest_block_slot)

    def record_api_request(
        self,
        service: str,
        endpoint: str,
        duration: float,
        status_code: int,
        rate_limited: bool = False,
    ):
        """Record the duration and outcome of one external API request."""
        status = "success" if 200 <= status_code < 300 else "error"

        self.api_request_duration.labels(service=service, endpoint=endpoint).observe(
            duration
        )
        self.api_request_total.labels(
            service=service, endpoint=endpoint, status=status
        ).inc()

        if rate_limited:
            self.api_rate_limit_hits.labels(service=service).inc()

    def update_alert_metrics(
        self, active_alerts: Dict[str, Any], sent_alert: Optional[str] = None
    ):
        """Update active-alert gauges and, optionally, the sent-alert counter."""
        # Tally currently open alerts by type (the alert IDs themselves are
        # not needed here).
        alert_counts: Dict[str, int] = {}
        for alert_info in active_alerts.values():
            alert_type = alert_info.get("type", "unknown")
            alert_counts[alert_type] = alert_counts.get(alert_type, 0) + 1

        for alert_type, count in alert_counts.items():
            self.alerts_active.labels(alert_type=alert_type).set(count)
        # NOTE(review): types whose count drops to zero are never reset here,
        # so their gauge keeps its last non-zero value — confirm acceptable.

        if sent_alert:
            # Alert identifiers start with "<CheckType>-..."; take the prefix.
            alert_type = sent_alert.split("-")[0]
            self.alerts_sent_total.labels(
                alert_type=alert_type, channel="configured"
            ).inc()

    def set_network_status(self, network: str, endpoint_type: str, connected: bool):
        """Set the connectivity gauge for one network endpoint."""
        status = 1 if connected else 0
        self.network_connection_status.labels(
            network=network, endpoint_type=endpoint_type
        ).set(status)
245 |
246 |
# Module-level singleton shared by the rest of the observer.
metrics = PythObserverMetrics()
248 |
--------------------------------------------------------------------------------
/pyth_observer/__init__.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | from typing import Any, Dict, List, Literal, Tuple
4 |
5 | from loguru import logger
6 | from pythclient.market_schedule import MarketSchedule
7 | from pythclient.pythaccounts import PythProductAccount
8 | from pythclient.pythclient import PythClient
9 | from pythclient.solana import (
10 | SOLANA_DEVNET_HTTP_ENDPOINT,
11 | SOLANA_DEVNET_WS_ENDPOINT,
12 | SOLANA_MAINNET_HTTP_ENDPOINT,
13 | SOLANA_MAINNET_WS_ENDPOINT,
14 | SOLANA_TESTNET_HTTP_ENDPOINT,
15 | SOLANA_TESTNET_WS_ENDPOINT,
16 | )
17 | from throttler import Throttler
18 |
19 | import pyth_observer.health_server as health_server
20 | from pyth_observer.check import State
21 | from pyth_observer.check.price_feed import PriceFeedState
22 | from pyth_observer.check.publisher import PublisherState
23 | from pyth_observer.coingecko import get_coingecko_prices
24 | from pyth_observer.dispatch import Dispatch
25 | from pyth_observer.metrics import metrics
26 | from pyth_observer.models import Publisher
27 |
# RPC endpoints for Pyth networks not covered by pythclient's built-in constants.
PYTHTEST_HTTP_ENDPOINT = "https://api.pythtest.pyth.network/"
PYTHTEST_WS_ENDPOINT = "wss://api.pythtest.pyth.network/"
PYTHNET_HTTP_ENDPOINT = "https://pythnet.rpcpool.com/"
PYTHNET_WS_ENDPOINT = "wss://pythnet.rpcpool.com/"

# Absolute path of the pyth_observer package directory.
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
34 |
35 |
def get_solana_urls(
    network: Literal["devnet", "testnet", "mainnet", "pythtest", "pythnet"]
) -> Tuple[str, str]:
    """Return the (HTTP, WebSocket) endpoint pair for the given network.

    Raises:
        KeyError: if `network` is not one of the supported names.
    """
    endpoints: Dict[str, Tuple[str, str]] = {
        "devnet": (SOLANA_DEVNET_HTTP_ENDPOINT, SOLANA_DEVNET_WS_ENDPOINT),
        "testnet": (SOLANA_TESTNET_HTTP_ENDPOINT, SOLANA_TESTNET_WS_ENDPOINT),
        "mainnet": (SOLANA_MAINNET_HTTP_ENDPOINT, SOLANA_MAINNET_WS_ENDPOINT),
        "pythtest": (PYTHTEST_HTTP_ENDPOINT, PYTHTEST_WS_ENDPOINT),
        "pythnet": (PYTHNET_HTTP_ENDPOINT, PYTHNET_WS_ENDPOINT),
    }
    return endpoints[network]
50 |
51 |
52 | class Observer:
    def __init__(
        self,
        config: Dict[str, Any],
        publishers: Dict[str, Publisher],
        coingecko_mapping: Dict[str, str],
    ) -> None:
        """Wire up the observer's clients, dispatcher, and metrics.

        Args:
            config: Parsed observer configuration; its "network" section must
                provide endpoints, the first mapping account, and rate limits.
            publishers: Publisher key -> Publisher metadata.
            coingecko_mapping: Pyth symbol -> CoinGecko coin id.
        """
        self.config = config
        self.dispatch = Dispatch(config, publishers)
        self.publishers = publishers
        # On-chain client used to fetch Pyth product/price accounts.
        self.pyth_client = PythClient(
            solana_endpoint=config["network"]["http_endpoint"],
            solana_ws_endpoint=config["network"]["ws_endpoint"],
            first_mapping_account_key=config["network"]["first_mapping"],
        )
        # Throttles RPC calls to stay within the configured rate limit.
        self.pyth_throttler = Throttler(
            rate_limit=int(config["network"]["request_rate_limit"]),
            period=float(config["network"]["request_rate_period"]),
        )
        self.coingecko_mapping = coingecko_mapping

        # Publish static instance metadata to Prometheus once at startup.
        metrics.set_observer_info(
            network=config["network"]["name"],
            config=config,
        )
77 |
78 | async def run(self) -> None:
79 | # global states
80 | states: List[State] = []
81 | while True:
82 | try:
83 | logger.info("Running checks")
84 |
85 | products = await self.get_pyth_products()
86 | coingecko_prices, coingecko_updates = await self.get_coingecko_prices()
87 | await self.refresh_all_pyth_prices()
88 |
89 | logger.info("Refreshed all state: products, coingecko, pyth")
90 |
91 | health_server.observer_ready = True
92 |
93 | processed_feeds = 0
94 | active_publishers_by_symbol: Dict[str, Dict[str, Any]] = {}
95 |
96 | for product in products:
97 | # Skip tombstone accounts with blank metadata
98 | if "symbol" not in product.attrs:
99 | continue
100 |
101 | if not product.first_price_account_key:
102 | continue
103 |
104 | # For each product, we build a list of price feed states (one
105 | # for each price account) and a list of publisher states (one
106 | # for each publisher).
107 | states: List[State] = []
108 | price_accounts = product.prices
109 |
110 | for _, price_account in price_accounts.items():
111 | # Handle potential None for min_publishers
112 | if (
113 | price_account.min_publishers is None
114 | # When min_publishers is high it means that the price is not production-ready
115 | # yet and it is still being tested. We need no alerting for these prices.
116 | or price_account.min_publishers >= 10
117 | ):
118 | continue
119 |
120 | # Ensure latest_block_slot is not None or provide a default value
121 | latest_block_slot = (
122 | price_account.slot if price_account.slot is not None else -1
123 | )
124 |
125 | if not price_account.aggregate_price_status:
126 | raise RuntimeError("Price account status is missing")
127 |
128 | if not price_account.aggregate_price_info:
129 | raise RuntimeError("Aggregate price info is missing")
130 |
131 | price_feed_state = PriceFeedState(
132 | symbol=product.attrs["symbol"],
133 | asset_type=product.attrs["asset_type"],
134 | schedule=MarketSchedule(product.attrs["schedule"]),
135 | public_key=price_account.key,
136 | status=price_account.aggregate_price_status,
137 | # this is the solana block slot when price account was fetched
138 | latest_block_slot=latest_block_slot,
139 | latest_trading_slot=price_account.last_slot,
140 | price_aggregate=price_account.aggregate_price_info.price,
141 | confidence_interval_aggregate=price_account.aggregate_price_info.confidence_interval,
142 | coingecko_price=coingecko_prices.get(
143 | product.attrs["symbol"]
144 | ),
145 | coingecko_update=coingecko_updates.get(
146 | product.attrs["symbol"]
147 | ),
148 | )
149 |
150 | states.append(price_feed_state)
151 | processed_feeds += 1
152 |
153 | metrics.update_price_feed_metrics(price_feed_state)
154 |
155 | symbol = product.attrs["symbol"]
156 | if symbol not in active_publishers_by_symbol:
157 | active_publishers_by_symbol[symbol] = {
158 | "count": 0,
159 | "asset_type": product.attrs["asset_type"],
160 | }
161 |
162 | for component in price_account.price_components:
163 | pub = self.publishers.get(component.publisher_key.key, None)
164 | publisher_name = (
165 | (pub.name if pub else "")
166 | + f" ({component.publisher_key.key})"
167 | ).strip()
168 |
169 | publisher_state = PublisherState(
170 | publisher_name=publisher_name,
171 | symbol=product.attrs["symbol"],
172 | asset_type=product.attrs["asset_type"],
173 | schedule=MarketSchedule(product.attrs["schedule"]),
174 | public_key=component.publisher_key,
175 | confidence_interval=component.latest_price_info.confidence_interval,
176 | confidence_interval_aggregate=price_account.aggregate_price_info.confidence_interval,
177 | price=component.latest_price_info.price,
178 | price_aggregate=price_account.aggregate_price_info.price,
179 | slot=component.latest_price_info.pub_slot,
180 | aggregate_slot=price_account.last_slot,
181 | # this is the solana block slot when price account was fetched
182 | latest_block_slot=latest_block_slot,
183 | status=component.latest_price_info.price_status,
184 | aggregate_status=price_account.aggregate_price_status,
185 | )
186 |
187 | states.append(publisher_state)
188 | active_publishers_by_symbol[symbol]["count"] += 1
189 |
190 | metrics.price_feeds_processed.set(processed_feeds)
191 |
192 | for symbol, info in active_publishers_by_symbol.items():
193 | metrics.publishers_active.labels(
194 | symbol=symbol, asset_type=info["asset_type"]
195 | ).set(info["count"])
196 |
197 | await self.dispatch.run(states)
198 |
199 | except Exception as e:
200 | logger.exception(f"Error in run loop: {repr(e)}")
201 | if "product" in locals():
202 | logger.error(
203 | f"Product attrs during error: {product.attrs}" # pyright: ignore[reportPossiblyUnboundVariable]
204 | )
205 | health_server.observer_ready = False
206 | metrics.loop_errors_total.labels(error_type=type(e).__name__).inc()
207 | await asyncio.sleep(5)
208 |
209 | async def get_pyth_products(self) -> List[PythProductAccount]:
210 | logger.debug("Fetching Pyth product accounts...")
211 |
212 | try:
213 | async with self.pyth_throttler:
214 | with metrics.time_operation(
215 | metrics.api_request_duration, service="pyth", endpoint="products"
216 | ):
217 | result = await self.pyth_client.refresh_products()
218 | metrics.api_request_total.labels(
219 | service="pyth", endpoint="products", status="success"
220 | ).inc()
221 | return result
222 | except Exception:
223 | metrics.api_request_total.labels(
224 | service="pyth", endpoint="products", status="error"
225 | ).inc()
226 | raise
227 |
228 | async def refresh_all_pyth_prices(self) -> None:
229 | """Refresh all Pyth prices once for all products."""
230 | logger.debug("Refreshing all Pyth price accounts...")
231 |
232 | try:
233 | async with self.pyth_throttler:
234 | with metrics.time_operation(
235 | metrics.api_request_duration, service="pyth", endpoint="prices"
236 | ):
237 | await self.pyth_client.refresh_all_prices()
238 | metrics.api_request_total.labels(
239 | service="pyth", endpoint="prices", status="success"
240 | ).inc()
241 | except Exception:
242 | metrics.api_request_total.labels(
243 | service="pyth", endpoint="prices", status="error"
244 | ).inc()
245 | raise
246 |
247 | async def get_coingecko_prices(
248 | self,
249 | ) -> Tuple[Dict[str, float], Dict[str, int]]:
250 | logger.debug("Fetching CoinGecko prices...")
251 |
252 | try:
253 | with metrics.time_operation(
254 | metrics.api_request_duration, service="coingecko", endpoint="prices"
255 | ):
256 | data = await get_coingecko_prices(self.coingecko_mapping)
257 | metrics.api_request_total.labels(
258 | service="coingecko", endpoint="prices", status="success"
259 | ).inc()
260 | except Exception:
261 | metrics.api_request_total.labels(
262 | service="coingecko", endpoint="prices", status="error"
263 | ).inc()
264 | raise
265 |
266 | prices: Dict[str, float] = {}
267 | updates: Dict[str, int] = {} # Unix timestamps
268 |
269 | for symbol in data:
270 | if "usd" not in data[symbol] or "last_updated_at" not in data[symbol]:
271 | logger.warning(
272 | f"CoinGecko data for {symbol} doesn't include `usd` and/or `last_updated_at`. CoinGecko returned: {data[symbol]}."
273 | )
274 | continue
275 |
276 | prices[symbol] = data[symbol]["usd"]
277 | updates[symbol] = data[symbol]["last_updated_at"]
278 |
279 | return (prices, updates)
280 |
--------------------------------------------------------------------------------
/pyth_observer/dispatch.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import os
4 | from copy import deepcopy
5 | from datetime import datetime, timedelta
6 | from typing import Any, Awaitable, Dict, List, Optional, TypedDict
7 |
8 | from loguru import logger
9 |
10 | from pyth_observer.alert_utils import generate_alert_identifier
11 | from pyth_observer.check import Check, State
12 | from pyth_observer.check.price_feed import PRICE_FEED_CHECKS, PriceFeedState
13 | from pyth_observer.check.publisher import PUBLISHER_CHECKS, PublisherState
14 | from pyth_observer.event import DatadogEvent # Used dynamically
15 | from pyth_observer.event import LogEvent # Used dynamically
16 | from pyth_observer.event import TelegramEvent # Used dynamically
17 | from pyth_observer.event import Context, Event, ZendutyEvent
18 | from pyth_observer.metrics import metrics
19 | from pyth_observer.models import Publisher
20 | from pyth_observer.zenduty import send_zenduty_alert
21 |
22 | assert DatadogEvent
23 | assert LogEvent
24 | assert TelegramEvent
25 | assert ZendutyEvent
26 |
27 |
class AlertInfo(TypedDict):
    """State tracked for one open alert, used for threshold-based alerting.

    Instances are persisted as JSON between runs (see Dispatch.load_alerts),
    so every value must stay JSON-serializable.
    """

    # The check class name, e.g. "PublisherOfflineCheck".
    type: str
    # ISO-format datetime marking the start of the current 5-minute window.
    window_start: str
    # Number of failures observed within the current 5-minute window.
    failures: int
    # Failures in the previous 5-minute window; None before the first rollover.
    last_window_failures: Optional[int]
    # Whether an alert has already been sent for this issue.
    sent: bool
    # ISO-format datetime of the most recent alert, or None if never sent.
    last_alert: Optional[str]
47 |
48 |
class Dispatch:
    """
    Load configuration for each check/state pair, run the check, and run
    notifiers for the checks that failed.

    For Zenduty/Telegram, failures are not alerted immediately: they are
    counted per 5-minute window in `open_alerts` (persisted to disk) and
    only raised/resolved once thresholds are crossed.
    """

    def __init__(
        self, config: Dict[str, Any], publishers: Dict[str, Publisher]
    ) -> None:
        self.config = config
        self.publishers = publishers
        self.open_alerts: Dict[str, AlertInfo] = {}
        if "ZendutyEvent" in self.config["events"]:
            self.open_alerts_file = os.environ["OPEN_ALERTS_FILE"]
            self.open_alerts = self.load_alerts()
        # below is used to store events to later send if multiple failures occur
        # events cannot be stored in open_alerts as they are not JSON serializable.
        self.delayed_events: Dict[str, Event] = {}

    def load_alerts(self) -> Dict[str, AlertInfo]:
        """Load persisted open alerts from disk, backfilling fields that were
        added after older versions of the file were written."""
        try:
            with open(self.open_alerts_file, "r") as file:
                loaded = json.load(file)
                # Ensure all required fields are present
                for alert_id, alert in loaded.items():
                    if "last_window_failures" not in alert:
                        alert["last_window_failures"] = None
                    if "last_alert" not in alert:
                        alert["last_alert"] = None
                return loaded  # type: ignore[return-value]
        except FileNotFoundError:
            return {}  # Return an empty dict if the file doesn't exist

    async def run(self, states: List[State]) -> None:
        """Run all enabled checks against `states` and send events for the
        failures; threshold-gated event types are deferred instead of sent."""
        # First, run each check and store the ones that failed
        failed_checks: List[Check] = []

        for state in states:
            if isinstance(state, PriceFeedState):
                failed_checks.extend(self.check_price_feed(state))
            elif isinstance(state, PublisherState):
                failed_checks.extend(self.check_publisher(state))
            else:
                raise RuntimeError("Unknown state")

        # Then, wrap each failed check in events and send them
        sent_events: List[Awaitable[None]] = []
        context = Context(
            network=self.config["network"]["name"], publishers=self.publishers
        )

        current_time = datetime.now()
        for check in failed_checks:
            for event_type in self.config["events"]:
                # Resolve the event class by its configured name (e.g. "LogEvent")
                # from this module's globals; the asserts at import time ensure
                # the classes are kept in scope.
                event: Event = globals()[event_type](check, context)

                if event_type in ["ZendutyEvent", "TelegramEvent"]:
                    alert_identifier = generate_alert_identifier(check)
                    alert = self.open_alerts.get(alert_identifier)
                    if alert is None:
                        # First failure for this issue: open a fresh alert window.
                        self.open_alerts[alert_identifier] = {
                            "type": check.__class__.__name__,
                            "window_start": current_time.isoformat(),
                            "failures": 1,
                            "last_window_failures": None,
                            "sent": False,
                            "last_alert": None,
                        }
                    else:
                        # Check window status before incrementing to avoid losing current run's failures
                        self.check_zd_alert_status(alert_identifier, current_time)
                        alert["failures"] += 1
                    # Always update delayed_events with the latest event to ensure we send
                    # the most recent error information when the alert is finally sent
                    self.delayed_events[f"{event_type}-{alert_identifier}"] = event
                    continue  # Skip sending immediately for ZendutyEvent or TelegramEvent

                sent_events.append(event.send())

        await asyncio.gather(*sent_events)

        metrics.update_alert_metrics(self.open_alerts)

        if "ZendutyEvent" in self.config["events"]:
            await self.process_zenduty_events(current_time)

    def check_price_feed(self, state: PriceFeedState) -> List[Check]:
        """Run all enabled price-feed checks for `state`; return the failures
        and record per-symbol success-rate metrics."""
        failed_checks: List[Check] = []
        total_checks = 0
        passed_checks = 0

        for check_class in PRICE_FEED_CHECKS:
            config = self.load_config(check_class.__name__, state.symbol)

            if config["enable"]:
                total_checks += 1
                check = check_class(state, config)

                with metrics.time_operation(
                    metrics.check_execution_duration, check_type=check_class.__name__
                ):
                    check_passed = check.run()

                if check_passed:
                    passed_checks += 1
                else:
                    failed_checks.append(check)

        if total_checks > 0:
            success_rate = passed_checks / total_checks
            metrics.check_success_rate.labels(
                check_type="price_feed", symbol=state.symbol
            ).set(success_rate)

        return failed_checks

    def check_publisher(self, state: PublisherState) -> List[Check]:
        """Run all enabled publisher checks for `state`; return the failures
        and record per-symbol success-rate metrics."""
        failed_checks: List[Check] = []
        total_checks = 0
        passed_checks = 0

        for check_class in PUBLISHER_CHECKS:
            config = self.load_config(check_class.__name__, state.symbol)

            if config["enable"]:
                total_checks += 1
                check = check_class(state, config)

                with metrics.time_operation(
                    metrics.check_execution_duration, check_type=check_class.__name__
                ):
                    check_passed = check.run()

                if check_passed:
                    passed_checks += 1
                else:
                    failed_checks.append(check)

        if total_checks > 0:
            success_rate = passed_checks / total_checks
            metrics.check_success_rate.labels(
                check_type="publisher", symbol=state.symbol
            ).set(success_rate)

        return failed_checks

    def load_config(self, check_name: str, symbol: str) -> Dict[str, Any]:
        """Return the global config for `check_name`, overlaid with any
        per-symbol overrides from the `checks` section."""
        config = deepcopy(self.config["checks"]["global"][check_name])

        if symbol in self.config["checks"]:
            if check_name in self.config["checks"][symbol]:
                config |= self.config["checks"][symbol][check_name]

        return config

    # Zenduty Functions

    def check_zd_alert_status(
        self, alert_identifier: str, current_time: datetime
    ) -> None:
        """Roll the alert's 5-minute failure window forward if it has elapsed,
        preserving the previous window's count in `last_window_failures`."""
        alert = self.open_alerts.get(alert_identifier)
        if alert is not None:
            # Reset the failure count if 5m has elapsed
            if current_time - datetime.fromisoformat(
                alert["window_start"]
            ) >= timedelta(minutes=5):
                alert["window_start"] = current_time.isoformat()
                alert["last_window_failures"] = alert["failures"]
                alert["failures"] = 0

    async def process_zenduty_events(self, current_time: datetime) -> None:
        """Raise or resolve threshold-gated alerts, send the deferred events,
        and persist the open-alert state to disk."""
        to_remove = []
        to_alert = []

        for identifier, info in self.open_alerts.items():
            # Check window status (idempotent - safe to call multiple times)
            # This handles alerts that didn't have failures in the current run
            self.check_zd_alert_status(identifier, current_time)
            check_config = self.config["checks"]["global"][info["type"]]
            alert_threshold = check_config.get("alert_threshold", 5)
            resolution_threshold = check_config.get("resolution_threshold", 3)
            # Resolve the alert if raised and failed <= $threshold times in the last 5m window
            # OR if the current window has low failures (for immediate resolution)
            resolved = False
            # Check if last window had low failures
            last_window_resolved = (
                info["last_window_failures"] is not None
                and info["last_window_failures"] <= resolution_threshold
            )
            if last_window_resolved:
                logger.debug(f"Resolving Zenduty alert {identifier}")
                resolved = True

                if info["sent"]:
                    response = await send_zenduty_alert(
                        identifier, identifier, resolved=True
                    )
                    if response and 200 <= response.status < 300:
                        to_remove.append(identifier)
                        metrics.alerts_sent_total.labels(
                            alert_type=info["type"], channel="zenduty"
                        ).inc()
                else:
                    # Never sent, so nothing to resolve remotely - just drop it.
                    to_remove.append(identifier)
            # Raise alert if failed > $threshold times within the last 5m window
            # or if already alerted and not yet resolved.
            # Re-alert at the start of each hour but not more often.
            elif (
                info["failures"] >= alert_threshold or (info["sent"] and not resolved)
            ) and (
                info["last_alert"] is None  # First alert - send immediately
                or (  # Subsequent alerts - send at the start of each hour
                    current_time - datetime.fromisoformat(info["last_alert"])
                    > timedelta(minutes=5)
                    and current_time.minute == 0  # Only alert at the start of each hour
                )
            ):
                logger.debug(f"Raising Zenduty alert {identifier}")
                self.open_alerts[identifier]["sent"] = True
                self.open_alerts[identifier]["last_alert"] = current_time.isoformat()
                # Only send events for event types that are actually enabled
                for event_type in self.config["events"]:
                    if event_type in ["ZendutyEvent", "TelegramEvent"]:
                        key = f"{event_type}-{identifier}"
                        event = self.delayed_events.get(key)
                        if event:
                            to_alert.append(event.send())
                            metrics.alerts_sent_total.labels(
                                alert_type=info["type"],
                                channel=event_type.lower().replace("event", ""),
                            ).inc()

        # Send the alerts that were delayed due to thresholds
        await asyncio.gather(*to_alert)

        # Remove alerts that have been resolved
        for identifier in to_remove:
            if self.open_alerts.get(identifier):
                del self.open_alerts[identifier]
            # Only clean up delayed_events for event types that are actually enabled
            for event_type in self.config["events"]:
                if event_type in ["ZendutyEvent", "TelegramEvent"]:
                    key = f"{event_type}-{identifier}"
                    if self.delayed_events.get(key):
                        del self.delayed_events[key]

        metrics.update_alert_metrics(self.open_alerts)

        with open(self.open_alerts_file, "w") as file:
            json.dump(self.open_alerts, file)
299 |
--------------------------------------------------------------------------------
/pyth_observer/check/publisher.py:
--------------------------------------------------------------------------------
1 | import time
2 | from collections import defaultdict, deque
3 | from dataclasses import asdict, dataclass
4 | from datetime import datetime
5 | from typing import Any, Dict, Protocol, runtime_checkable
6 | from zoneinfo import ZoneInfo
7 |
8 | from loguru import logger
9 | from pythclient.market_schedule import MarketSchedule
10 | from pythclient.pythaccounts import PythPriceStatus
11 | from pythclient.solana import SolanaPublicKey
12 |
13 |
@dataclass
class PriceUpdate:
    """Represents a single price with its timestamp (epoch seconds)."""

    timestamp: int  # Unix epoch seconds when the price was cached
    price: float  # The published price value
20 |
21 |
# Publishers whose slot is more than this many slots away from the aggregate
# slot are excluded from price/confidence comparison checks.
PUBLISHER_EXCLUSION_DISTANCE = 25
PUBLISHER_CACHE_MAX_LEN = 30
"""Roughly 30 mins of updates, since the check runs about once a minute"""

PUBLISHER_CACHE = defaultdict(lambda: deque(maxlen=PUBLISHER_CACHE_MAX_LEN))
"""
Cache that holds PriceUpdate entries for publisher/feed combos as they stream in,
keyed by (publisher_name, symbol) tuples.
Entries beyond `PUBLISHER_CACHE_MAX_LEN` are automatically pruned by the deque.
Used by the PublisherStalledCheck to detect stalls in prices.
"""
32 |
33 |
@dataclass
class PublisherState:
    """Snapshot of one publisher's latest price for a symbol, together with
    the aggregate values the publisher is checked against."""

    publisher_name: str  # Display name plus "(<public key>)", built by the caller
    symbol: str  # e.g. "Crypto.BTC/USD"
    asset_type: str  # e.g. "Crypto", "Crypto Redemption Rate"
    schedule: MarketSchedule  # Market hours; checks are skipped while closed
    public_key: SolanaPublicKey  # The publisher's account key
    status: PythPriceStatus  # The publisher's own price status
    aggregate_status: PythPriceStatus  # Status of the aggregate price
    slot: int  # Slot of the publisher's latest price
    aggregate_slot: int  # Slot of the latest aggregate price
    latest_block_slot: int  # Solana block slot when the price account was fetched
    price: float  # The publisher's price
    price_aggregate: float  # The aggregate price
    confidence_interval: float  # The publisher's confidence interval
    confidence_interval_aggregate: float  # The aggregate confidence interval
50 |
51 |
# Per-check configuration mapping (option name -> scalar value from the config).
PublisherCheckConfig = Dict[str, str | float | int | bool]
53 |
54 |
@runtime_checkable
class PublisherCheck(Protocol):
    """Structural interface implemented by every publisher-level check."""

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        ...

    def state(self) -> PublisherState:
        """Return the state this check was constructed with."""
        ...

    def run(self) -> bool:
        """Return True when the check passes (or is skipped), False on failure."""
        ...

    def error_message(self) -> Dict[str, Any]:
        """Return a structured description of the failure for notifiers."""
        ...
68 |
69 |
class PublisherWithinAggregateConfidenceCheck(PublisherCheck):
    """Fails when a publisher's price sits too many aggregate confidence
    intervals away from the aggregate price."""

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        self.__state = state
        self.__max_interval_distance: float = float(config["max_interval_distance"])

    def state(self) -> PublisherState:
        return self.__state

    def run(self) -> bool:
        s = self.__state

        # The comparison is only meaningful while both feeds are trading.
        if s.status != PythPriceStatus.TRADING:
            return True
        if s.aggregate_status != PythPriceStatus.TRADING:
            return True

        # A zero publisher confidence interval carries no information.
        if s.confidence_interval == 0:
            return True

        # Publishers too many slots away from the aggregate are excluded.
        if abs(s.slot - s.aggregate_slot) > PUBLISHER_EXCLUSION_DISTANCE:
            return True

        # Avoid dividing by a zero aggregate confidence interval.
        if s.confidence_interval_aggregate == 0:
            return True

        intervals_away = abs(
            (s.price - s.price_aggregate) / s.confidence_interval_aggregate
        )
        # Pass while the deviation stays under the configured limit.
        return intervals_away < self.__max_interval_distance

    def error_message(self) -> Dict[str, Any]:
        s = self.__state
        diff = s.price - s.price_aggregate
        if s.confidence_interval_aggregate == 0:
            intervals_away = abs(diff)
        else:
            intervals_away = abs(diff / s.confidence_interval_aggregate)

        return {
            "msg": f"{s.publisher_name} price is {intervals_away} times away from confidence.",
            "type": "PublisherWithinAggregateConfidenceCheck",
            "publisher": s.publisher_name,
            "symbol": s.symbol,
            "publisher_price": f"{s.price} ± {s.confidence_interval}",
            "aggregate_price": f"{s.price_aggregate} ± {s.confidence_interval_aggregate}",
        }
126 |
127 |
class PublisherConfidenceIntervalCheck(PublisherCheck):
    """Fails when a trading publisher reports a confidence interval at or
    below the configured minimum (i.e. suspiciously tight)."""

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        self.__state = state
        self.__min_confidence_interval: int = int(config["min_confidence_interval"])

    def state(self) -> PublisherState:
        return self.__state

    def run(self) -> bool:
        s = self.__state

        # Nothing to validate unless the publisher is actively trading.
        if s.status != PythPriceStatus.TRADING:
            return True

        # Publishers lagging far from the aggregate slot are excluded.
        if abs(s.slot - s.aggregate_slot) > PUBLISHER_EXCLUSION_DISTANCE:
            return True

        # Pass only when the interval is strictly wider than the minimum.
        return s.confidence_interval > self.__min_confidence_interval

    def error_message(self) -> Dict[str, Any]:
        s = self.__state
        return {
            "msg": f"{s.publisher_name} confidence interval is too tight.",
            "type": "PublisherConfidenceIntervalCheck",
            "publisher": s.publisher_name,
            "symbol": s.symbol,
            "price": s.price,
            "confidence_interval": s.confidence_interval,
        }
162 |
163 |
class PublisherOfflineCheck(PublisherCheck):
    """Fails when, during market hours, a publisher has not published for a
    while — unless it has been silent for so long it counts as abandoned."""

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        self.__state = state
        self.__max_slot_distance: int = int(config["max_slot_distance"])
        self.__abandoned_slot_distance: int = int(config["abandoned_slot_distance"])

    def state(self) -> PublisherState:
        return self.__state

    def run(self) -> bool:
        now_ny = datetime.now(ZoneInfo("America/New_York"))

        # Only relevant while the market is open.
        if not self.__state.schedule.is_market_open(now_ny):
            return True

        gap = self.__state.latest_block_slot - self.__state.slot

        # Recent enough -> healthy. Extremely stale -> treated as abandoned
        # (no point in alerting on publishers that left long ago).
        return gap < self.__max_slot_distance or gap > self.__abandoned_slot_distance

    def error_message(self) -> Dict[str, Any]:
        gap = self.__state.latest_block_slot - self.__state.slot
        return {
            "msg": f"{self.__state.publisher_name} hasn't published recently for {gap} slots.",
            "type": "PublisherOfflineCheck",
            "publisher": self.__state.publisher_name,
            "symbol": self.__state.symbol,
            "publisher_slot": self.__state.slot,
            "aggregate_slot": self.__state.aggregate_slot,
        }
204 |
205 |
class PublisherPriceCheck(PublisherCheck):
    """Fails when a publisher's price deviates from the aggregate price by
    more than a configured percentage, after discounting the publisher's
    own confidence interval."""

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        self.__state = state
        self.__max_aggregate_distance: float = float(
            config["max_aggregate_distance"]
        )  # %
        self.__max_slot_distance: int = int(config["max_slot_distance"])  # Slots

    def state(self) -> PublisherState:
        return self.__state

    def run(self) -> bool:
        s = self.__state

        # Both the aggregate and the publisher must be trading.
        if s.aggregate_status != PythPriceStatus.TRADING:
            return True
        if s.status != PythPriceStatus.TRADING:
            return True

        # Exclude publishers that are too many slots behind the aggregate.
        if abs(s.slot - s.aggregate_slot) > self.__max_slot_distance:
            return True

        # A zero price on either side is a sentinel, not a comparable quote.
        if s.price == 0 or s.price_aggregate == 0:
            return True

        deviation_pct = (self.ci_adjusted_price_diff() / s.price_aggregate) * 100

        # Pass while the deviation stays within the configured distance.
        return deviation_pct <= self.__max_aggregate_distance

    def error_message(self) -> Dict[str, Any]:
        s = self.__state
        if s.price_aggregate == 0:
            deviation = self.ci_adjusted_price_diff()
        else:
            deviation = (self.ci_adjusted_price_diff() / s.price_aggregate) * 100

        return {
            "msg": f"{s.publisher_name} price is too far from aggregate price.",
            "type": "PublisherPriceCheck",
            "publisher": s.publisher_name,
            "symbol": s.symbol,
            "publisher_price": f"{s.price} ± {s.confidence_interval}",
            "aggregate_price": f"{s.price_aggregate} ± {s.confidence_interval_aggregate}",
            "deviation": f"{deviation:.2f}%",
        }

    def ci_adjusted_price_diff(self) -> float:
        """Distance between the aggregate price and the closest edge of the
        publisher's confidence interval; 0 when the aggregate lies inside it."""
        raw_gap = abs(self.__state.price - self.__state.price_aggregate)
        return max(raw_gap - self.__state.confidence_interval, 0)
267 |
268 |
class PublisherStalledCheck(PublisherCheck):
    """Detects publishers that keep publishing the same (or near-identical)
    price for longer than the configured stall time limit.

    Relies on the module-level PUBLISHER_CACHE to keep a rolling window of
    recent price updates per (publisher_name, symbol) pair; delegates the
    actual stall analysis to StallDetector.
    """

    def __init__(self, state: PublisherState, config: PublisherCheckConfig) -> None:
        self.__state = state
        self.__stall_time_limit: int = int(
            config["stall_time_limit"]
        )  # Time in seconds
        self.__abandoned_time_limit: int = int(config["abandoned_time_limit"])
        self.__max_slot_distance: int = int(config["max_slot_distance"])

        from pyth_observer.check.stall_detection import (  # noqa: deferred import to avoid circular import
            StallDetector,
        )

        self.__detector = StallDetector(
            stall_time_limit=self.__stall_time_limit,
            noise_threshold=float(config["noise_threshold"]),
            min_noise_samples=int(config["min_noise_samples"]),
        )

    def state(self) -> PublisherState:
        return self.__state

    def run(self) -> bool:
        market_open = self.__state.schedule.is_market_open(
            datetime.now(ZoneInfo("America/New_York")),
        )

        # Closed markets are legitimately static; skip the check.
        if not market_open:
            return True

        distance = self.__state.latest_block_slot - self.__state.slot

        # Pass for redemption rates because they are expected to be static for long periods
        if self.__state.asset_type == "Crypto Redemption Rate":
            return True

        # Pass when publisher is offline because PublisherOfflineCheck will be triggered
        if distance >= self.__max_slot_distance:
            return True

        current_time = int(time.time())

        publisher_key = (self.__state.publisher_name, self.__state.symbol)
        updates = PUBLISHER_CACHE[publisher_key]

        # Only cache new prices, let repeated prices grow stale.
        # These will be caught as an exact stall in the detector.
        is_repeated_price = updates and updates[-1].price == self.__state.price
        cur_update = PriceUpdate(current_time, self.__state.price)
        if not is_repeated_price:
            PUBLISHER_CACHE[publisher_key].append(cur_update)

        # Analyze for stalls
        result = self.__detector.analyze_updates(list(updates), cur_update)
        logger.debug(f"Stall detection result: {result}")

        self.__last_analysis = result  # For error logging

        # If we've been stalled for too long, abandon this check
        if result.is_stalled and result.duration > self.__abandoned_time_limit:
            return True

        return not result.is_stalled

    def error_message(self) -> Dict[str, Any]:
        # NOTE: assumes run() was called before error_message(), since
        # __last_analysis is only assigned there.
        stall_duration = f"{self.__last_analysis.duration:.1f} seconds"
        return {
            "msg": f"{self.__state.publisher_name} has been publishing the same price of {self.__state.symbol} for {stall_duration}",
            "type": "PublisherStalledCheck",
            "publisher": self.__state.publisher_name,
            "symbol": self.__state.symbol,
            "price": self.__state.price,
            "stall_type": self.__last_analysis.stall_type,
            "stall_duration": stall_duration,
            "analysis": asdict(self.__last_analysis),
        }
345 |
346 |
# All publisher-level checks run by the dispatcher, in execution order.
PUBLISHER_CHECKS = [
    PublisherWithinAggregateConfidenceCheck,
    PublisherConfidenceIntervalCheck,
    PublisherOfflineCheck,
    PublisherPriceCheck,
    PublisherStalledCheck,
]
354 |
--------------------------------------------------------------------------------
/tests/test_checks_publisher.py:
--------------------------------------------------------------------------------
1 | import random
2 | import time
3 | from datetime import datetime
4 | from unittest.mock import patch
5 | from zoneinfo import ZoneInfo
6 |
7 | import pytest
8 | from pythclient.market_schedule import MarketSchedule
9 | from pythclient.pythaccounts import PythPriceStatus
10 | from pythclient.solana import SolanaPublicKey
11 |
12 | from pyth_observer.check.publisher import (
13 | PUBLISHER_CACHE,
14 | PriceUpdate,
15 | PublisherOfflineCheck,
16 | PublisherPriceCheck,
17 | PublisherStalledCheck,
18 | PublisherState,
19 | )
20 |
21 |
def make_publisher_state(
    pub_slot: int,
    pub_price: float,
    pub_conf: float,
    agg_slot: int,
    agg_price: float,
    agg_conf: float,
    asset_type: str = "Crypto",
    symbol: str = "Crypto.BTC/USD",
) -> PublisherState:
    """Build a PublisherState test fixture with both sides marked TRADING."""
    schedule = MarketSchedule("America/New_York;O,O,O,O,O,O,O;")
    key = SolanaPublicKey("2hgu6Umyokvo8FfSDdMa9nDKhcdv9Q4VvGNhRCeSWeD3")
    return PublisherState(
        publisher_name="publisher",
        symbol=symbol,
        asset_type=asset_type,
        schedule=schedule,
        public_key=key,
        status=PythPriceStatus.TRADING,
        aggregate_status=PythPriceStatus.TRADING,
        slot=pub_slot,
        aggregate_slot=agg_slot,
        latest_block_slot=agg_slot,
        price=pub_price,
        price_aggregate=agg_price,
        confidence_interval=pub_conf,
        confidence_interval_aggregate=agg_conf,
    )
48 |
49 |
def test_publisher_price_check():
    """Deviation beyond max_aggregate_distance must fail PublisherPriceCheck."""

    def run_check(
        state: PublisherState, max_aggregate_distance: float, max_slot_distance: int
    ) -> bool:
        config = {
            "max_aggregate_distance": max_aggregate_distance,
            "max_slot_distance": max_slot_distance,
        }
        return PublisherPriceCheck(state, config).run()

    # 100 vs 110 with publisher CI 2.0 -> CI-adjusted deviation of ~7.27%:
    # passes a 10% limit, fails a 6% limit.
    state1 = make_publisher_state(1, 100.0, 2.0, 1, 110.0, 1.0)
    assert run_check(state1, 10, 25)
    assert not run_check(state1, 6, 25)
66 |
67 |
class TestPublisherStalledCheck:
    """Tests for PublisherStalledCheck driven by a simulated clock.

    The check keeps per-(publisher, symbol) price history in the module-level
    PUBLISHER_CACHE, so every test clears it (autouse fixture) and seeds it
    through setup_check().
    """

    @pytest.fixture(autouse=True)
    def setup(self):
        """Clear cache and time simulation before each test"""
        PUBLISHER_CACHE.clear()
        self.current_time = int(time.time())
        yield
        PUBLISHER_CACHE.clear()

    def simulate_time_pass(self, seconds: float) -> float:
        # Advance the simulated clock. This is invoked by the patched
        # time.time() in run_check(), so *every* time.time() call inside
        # check.run() moves the clock forward by `seconds`.
        self.current_time += seconds
        return self.current_time

    def setup_check(
        self,
        state: PublisherState,
        stall_time_limit: int = 5,
        abandoned_time_limit: int = 25,
        max_slot_distance: int = 25,
        noise_threshold: float = 1e-4,
        min_noise_samples: int = 10,
    ) -> PublisherStalledCheck:
        """Build a PublisherStalledCheck for `state` and seed PUBLISHER_CACHE
        with the state's current price at the current simulated time."""
        check = PublisherStalledCheck(
            state,
            {
                "stall_time_limit": stall_time_limit,
                "abandoned_time_limit": abandoned_time_limit,
                "max_slot_distance": max_slot_distance,
                "noise_threshold": noise_threshold,
                "min_noise_samples": min_noise_samples,
            },
        )

        # Seed the cache with the publisher state
        PUBLISHER_CACHE[(state.publisher_name, state.symbol)].append(
            PriceUpdate(int(self.current_time), state.price)
        )

        return check

    def run_check(self, check: PublisherStalledCheck, seconds: float, expected: bool):
        """Advance simulated time by `seconds`, run the check, assert its result."""
        with patch("time.time", new=lambda: self.simulate_time_pass(seconds)):
            assert check.run() == expected

    def test_exact_stall_fails_check(self):
        """A price static for longer than stall_time_limit fails; changes reset it."""
        state_a = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check_a = self.setup_check(state_a, stall_time_limit=5)
        self.run_check(check_a, 5, True)  # Should pass as it hits the limit exactly

        PUBLISHER_CACHE.clear()
        state_b = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check_b = self.setup_check(state_b, stall_time_limit=5)
        self.run_check(check_b, 6, False)  # Should fail as it exceeds the limit

        PUBLISHER_CACHE.clear()
        state_c = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check_c = self.setup_check(state_c, stall_time_limit=5)
        self.run_check(check_c, 2, True)  # Initial check should pass
        state_c.price = 105.0  # Change the price
        self.run_check(check_c, 3, True)  # Should pass as price changes
        state_c.price = 100.0  # Change back to original price
        # Simulate a stall -- send the same price repeatedly.
        self.run_check(check_c, 2, True)
        state_c.price = 100.0
        self.run_check(check_c, 2, True)
        state_c.price = 100.0
        self.run_check(check_c, 2, True)
        state_c.price = 100.0
        self.run_check(
            check_c, 2, False
        )  # Should fail since we breached the stall time limit

        PUBLISHER_CACHE.clear()
        state_c = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check_c = self.setup_check(state_c, stall_time_limit=5)
        self.run_check(check_c, 2, True)  # Initial check should pass
        state_c.price = 105.0  # Change the price
        self.run_check(check_c, 3, True)  # Should pass as price changes
        state_c.price = 100.0  # Change back to original price
        self.run_check(check_c, 4, True)  # Should pass as price changes
        self.run_check(
            check_c, 8, False
        )  # Should fail as price stalls for too long after last change

        # Adding a check for when the publisher is offline
        PUBLISHER_CACHE.clear()
        state_d = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        state_d.latest_block_slot = 25
        state_d.slot = 0
        check_d = self.setup_check(state_d, 5, 25, 25)
        self.run_check(check_d, 10, True)  # Should pass as the publisher is offline

    def test_artificially_noisy_stall_fails_check(self):
        """Test detection of stalls with artificial noise"""
        state = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check = self.setup_check(state, stall_time_limit=50, min_noise_samples=10)

        # Add prices with small artificial noise, exceeding stall_time_limit and min_noise_updates
        for seconds in range(0, 55, 5):
            noise = state.price * (
                1e-6 * (random.random() - 0.5)
            )  # Random noise within ±1e-4%
            state.price = 100.0 + noise
            # Should fail after 50 seconds and 10 samples
            # NOTE(review): `seconds` only reaches 50, so `seconds < 55` is
            # always True and the failing branch is never asserted — the
            # intended boundary (likely `seconds < 50`) should be confirmed
            # against PublisherStalledCheck's stall-detection semantics.
            self.run_check(check, 30, seconds < 55)

    def test_normal_price_movement_passes_check(self):
        """Test that normal price movements don't trigger stall detection"""
        state = make_publisher_state(1, 100.0, 2.0, 1, 100.0, 1.0)
        check = self.setup_check(state, stall_time_limit=50, min_noise_samples=10)

        # Add prices with significant variations to simulate real
        # price movements, exceeding stall_time_limit and min_noise_updates
        for seconds in range(0, 55, 5):
            state.price = 100.0 + (seconds * 0.001)  # 0.1% change each time
            self.run_check(check, 30, True)  # Should always pass

    def test_redemption_rate_passes_check(self):
        """Test that redemption rates are always allowed to be static"""
        state = make_publisher_state(
            1,
            100.0,
            2.0,
            1,
            100.0,
            1.0,
            asset_type="Crypto Redemption Rate",
            symbol="Crypto.FUSDC/USDC.RR",
        )
        # Use the default stall_time_limit so a pass can only come from the
        # redemption-rate exemption. (Passing int(self.current_time)
        # positionally here would set stall_time_limit to ~epoch seconds,
        # making the test pass vacuously for any asset type.)
        check = self.setup_check(state, stall_time_limit=5)

        # Should pass even after long period without changes
        self.run_check(check, 3600, True)  # 1 hour
201 |
202 |
class TestPublisherOfflineCheck:
    """Tests for PublisherOfflineCheck: slot-distance thresholds and market hours."""

    def make_state(
        self,
        publisher_slot: int,
        latest_block_slot: int,
        schedule: MarketSchedule | None = None,
        publisher_name: str = "test_publisher",
        symbol: str = "Crypto.BTC/USD",
    ) -> PublisherState:
        """Build a TRADING PublisherState with the given publisher/block slots."""
        effective_schedule = (
            schedule
            if schedule is not None
            else MarketSchedule("America/New_York;O,O,O,O,O,O,O;")
        )
        return PublisherState(
            publisher_name=publisher_name,
            symbol=symbol,
            asset_type="Crypto",
            schedule=effective_schedule,
            public_key=SolanaPublicKey("2hgu6Umyokvo8FfSDdMa9nDKhcdv9Q4VvGNhRCeSWeD3"),
            status=PythPriceStatus.TRADING,
            aggregate_status=PythPriceStatus.TRADING,
            slot=publisher_slot,
            aggregate_slot=latest_block_slot - 5,
            latest_block_slot=latest_block_slot,
            price=100.0,
            price_aggregate=100.0,
            confidence_interval=1.0,
            confidence_interval_aggregate=1.0,
        )

    def make_check(
        self,
        state: PublisherState,
        max_slot_distance: int = 10,
        abandoned_slot_distance: int = 100,
    ) -> PublisherOfflineCheck:
        """Build a PublisherOfflineCheck around `state` with the given limits."""
        config = {
            "max_slot_distance": max_slot_distance,
            "abandoned_slot_distance": abandoned_slot_distance,
        }
        return PublisherOfflineCheck(state, config)

    def run_check_with_datetime(
        self,
        check: PublisherOfflineCheck,
        check_datetime: datetime,
        expected: bool | None = None,
    ) -> bool:
        """Run `check` with datetime.now() pinned to `check_datetime`.

        Asserts the result when `expected` is given; always returns it.
        """
        with patch("pyth_observer.check.publisher.datetime") as dt_mock:
            dt_mock.now.return_value = check_datetime
            outcome = check.run()
        if expected is not None:
            assert outcome is expected
        return outcome

    def test_market_closed_passes_check(self):
        """A closed market makes the check pass regardless of slot distance."""
        always_closed = MarketSchedule("America/New_York;C,C,C,C,C,C,C;")
        state = self.make_state(100, 200, schedule=always_closed)
        check = self.make_check(state, 10, 50)

        assert check.run() is True

    def test_market_open_within_max_distance_passes(self):
        """Slot distance within max_slot_distance passes while the market is open."""
        check = self.make_check(self.make_state(100, 105), 10, 100)

        assert check.run() is True

    def test_market_open_exceeds_max_distance_fails(self):
        """Distance beyond max_slot_distance (but not abandoned) fails."""
        check = self.make_check(self.make_state(100, 120), 10, 100)

        assert check.run() is False

    def test_market_open_exceeds_abandoned_distance_passes(self):
        """Distance beyond abandoned_slot_distance is treated as abandoned and passes."""
        check = self.make_check(self.make_state(100, 250), 10, 100)

        assert check.run() is True

    def test_boundary_at_max_slot_distance(self):
        """Distance exactly equal to max_slot_distance fails."""
        check = self.make_check(self.make_state(100, 110), 10, 100)

        assert check.run() is False

    def test_boundary_below_max_slot_distance(self):
        """Distance one below max_slot_distance passes."""
        check = self.make_check(self.make_state(100, 109), 10, 100)

        assert check.run() is True

    def test_boundary_at_abandoned_slot_distance(self):
        """Distance exactly at abandoned_slot_distance is not yet abandoned."""
        check = self.make_check(self.make_state(100, 200), 10, 100)

        # Distance of 100 is not strictly greater than 100, so still offline.
        assert check.run() is False

    def test_boundary_above_abandoned_slot_distance(self):
        """Distance one above abandoned_slot_distance counts as abandoned."""
        check = self.make_check(self.make_state(100, 201), 10, 100)

        # Distance of 101 exceeds 100, so the publisher is considered abandoned.
        assert check.run() is True

    def test_different_configurations(self):
        """Behavior under alternative limit configurations (distance fixed at 50)."""
        state = self.make_state(100, 150)

        # 50 < max_slot_distance of 60 -> healthy, passes.
        assert self.make_check(state, 60, 200).run() is True

        # 50 > abandoned_slot_distance of 40 -> abandoned, passes.
        assert self.make_check(state, 10, 40).run() is True

    def test_zero_distance_passes(self):
        """Identical publisher and block slots pass."""
        check = self.make_check(self.make_state(100, 100), 10, 100)

        assert check.run() is True

    def test_market_schedule_variations(self):
        """Weekday-only schedule: fails when open and offline, passes when closed."""
        tz = ZoneInfo("America/New_York")
        weekday_only = MarketSchedule("America/New_York;O,O,O,O,O,C,C;")
        state = self.make_state(100, 120, schedule=weekday_only)
        check = self.make_check(state, 10, 100)

        # Monday afternoon: market open, distance 20 exceeds max -> fails.
        self.run_check_with_datetime(
            check, datetime(2024, 1, 15, 14, 0, 0, tzinfo=tz), expected=False
        )

        # Sunday afternoon: market closed -> passes.
        self.run_check_with_datetime(
            check, datetime(2024, 1, 14, 14, 0, 0, tzinfo=tz), expected=True
        )

    def test_market_opening_detects_offline_publisher(self):
        """An offline publisher is flagged the moment the market opens."""
        tz = ZoneInfo("America/New_York")
        weekday_only = MarketSchedule("America/New_York;O,O,O,O,O,C,C;")
        # Publisher is offline: slot distance 20 exceeds the max of 10.
        state = self.make_state(100, 120, schedule=weekday_only)
        check = self.make_check(state, 10, 100)

        # Sunday just before midnight: market closed, so the offline
        # publisher still passes.
        self.run_check_with_datetime(
            check, datetime(2024, 1, 14, 23, 59, 59, tzinfo=tz), expected=True
        )

        # Monday midnight: market opens, so the check now fires.
        self.run_check_with_datetime(
            check, datetime(2024, 1, 15, 0, 0, 0, tzinfo=tz), expected=False
        )
418 |
--------------------------------------------------------------------------------
/sample.coingecko.yaml:
--------------------------------------------------------------------------------
1 | {
2 | "Crypto.0G/USD": "zero-gravity",
3 | "Crypto.1INCH/USD": "1inch",
4 | "Crypto.2Z/USD": "doublezero",
5 | "Crypto.4/USD": "4-2",
6 | "Crypto.A/USD": "vaulta",
7 | "Crypto.AAPLX/USD": "apple-xstock",
8 | "Crypto.AAVE/USD": "aave",
9 | "Crypto.ACT/USD": "act-i-the-ai-prophecy",
10 | "Crypto.ADA/USD": "cardano",
11 | "Crypto.AERGO/USD": "aergo",
12 | "Crypto.AERO/USD": "aerodrome-finance",
13 | "Crypto.AEVO/USD": "aevo-exchange",
14 | "Crypto.AFSUI/USD": "aftermath-staked-sui",
15 | "Crypto.AI16Z/USD": "ai16z",
16 | "Crypto.AIXBT/USD": "aixbt",
17 | "Crypto.AKT/USD": "akash-network",
18 | "Crypto.ALGO/USD": "algorand",
19 | "Crypto.ALICE/USD": "alice",
20 | "Crypto.ALKIMI/USD": "alkimi-2",
21 | "Crypto.ALT/USD": "altlayer",
22 | "Crypto.AMI/USD": "ami",
23 | "Crypto.AMP/USD": "amp-token",
24 | "Crypto.ANIME/USD": "anime",
25 | "Crypto.ANKR/USD": "ankr",
26 | "Crypto.ANON/USD": "anon-2",
27 | "Crypto.APE/USD": "apecoin",
28 | "Crypto.APEX/USD": "apex-token-2",
29 | "Crypto.API3/USD": "api3",
30 | "Crypto.APT/USD": "aptos",
31 | "Crypto.AR/USD": "arweave",
32 | "Crypto.ARB/USD": "arbitrum",
33 | "Crypto.ARC/USD": "ai-rig-complex",
34 | "Crypto.ARKM/USD": "arkham",
35 | "Crypto.ASTER/USD": "aster-2",
36 | "Crypto.ASTR/USD": "astar",
37 | "Crypto.ATH/USD": "aethir",
38 | "Crypto.ATLAS/USD": "star-atlas",
39 | "Crypto.ATOM/USD": "cosmos",
40 | "Crypto.AUDD/USD": "novatti-australian-digital-dollar",
41 | "Crypto.AUDIO/USD": "audius",
42 | "Crypto.AURORA/USD": "aurora-near",
43 | "Crypto.AUSD/USD": "agora-dollar",
44 | "Crypto.AVAIL/USD": "avail",
45 | "Crypto.AVAX/USD": "avalanche-2",
46 | "Crypto.AVNT/USD": "avantis",
47 | "Crypto.AXL/USD": "axelar",
48 | "Crypto.AXS/USD": "axie-infinity",
49 | "Crypto.B3/USD": "b3",
50 | "Crypto.BABY/USD": "babylon",
51 | "Crypto.BABYDOGE/USD": "baby-doge-coin",
52 | "Crypto.BAL/USD": "balancer",
53 | "Crypto.BAN/USD": "comedian",
54 | "Crypto.BAND/USD": "band-protocol",
55 | "Crypto.BAT/USD": "basic-attention-token",
56 | "Crypto.BBSOL/USD": "bybit-staked-sol",
57 | "Crypto.BCH/USD": "bitcoin-cash",
58 | "Crypto.BELIEVE/USD": "ben-pasternak",
59 | "Crypto.BENJI/USD": "basenji",
60 | "Crypto.BERA/USD": "berachain-bera",
61 | "Crypto.BGB/USD": "bitget-token",
62 | "Crypto.BIO/USD": "bio-protocol",
63 | "Crypto.BITCOIN/USD": "harrypotterobamasonic10inu",
64 | "Crypto.BLAST/USD": "blast",
65 | "Crypto.BLUE/USD": "bluefin",
66 | "Crypto.BLUR/USD": "blur",
67 | "Crypto.BLZE/USD": "solblaze",
68 | "Crypto.BMT/USD": "bubblemaps",
69 | "Crypto.BNB/USD": "binancecoin",
70 | "Crypto.BNSOL/USD": "binance-staked-sol",
71 | "Crypto.BOBA/USD": "boba-network",
72 | "Crypto.BODEN/USD": "jeo-boden",
73 | "Crypto.BOLD/USD": "bold-2",
74 | "Crypto.BOME/USD": "book-of-meme",
75 | "Crypto.BONK/USD": "bonk",
76 | "Crypto.BORG/USD": "swissborg",
77 | "Crypto.BRETT/USD": "brett",
78 | "Crypto.BROCCOLI/USD": "czs-dog",
79 | "Crypto.BSOL/USD": "blazestake-staked-sol",
80 | "Crypto.BSV/USD": "bitcoin-cash-sv",
81 | "Crypto.BTC/USD": "bitcoin",
82 | "Crypto.BTT/USD": "bittorrent",
83 | "Crypto.BUCK/USD": "bucket-protocol-buck-stablecoin",
84 | "Crypto.BUCKET.USDB/USD": "bucket-usd",
85 | "Crypto.BUDDY/USD": "alright-buddy",
86 | "Crypto.BYUSD/USD": "byusd",
87 | "Crypto.C98/USD": "coin98",
88 | "Crypto.CAKE/USD": "pancakeswap-token",
89 | "Crypto.CAMP/USD": "camp-network",
90 | "Crypto.CARV/USD": "carv",
91 | "Crypto.CASH/USD": "cash-2",
92 | "Crypto.CAT/USD": "cat-3",
93 | "Crypto.CBBTC/USD": "coinbase-wrapped-btc",
94 | "Crypto.CBDOGE/USD": "coinbase-wrapped-doge",
95 | "Crypto.CBETH/USD": "coinbase-wrapped-staked-eth",
96 | "Crypto.CBXRP/USD": "coinbase-wrapped-xrp",
97 | "Crypto.CC/USD": "canton-network",
98 | "Crypto.CELO/USD": "celo",
99 | "Crypto.CELR/USD": "celer-network",
100 | "Crypto.CETUS/USD": "cetus-protocol",
101 | "Crypto.CFX/USD": "conflux-token",
102 | "Crypto.CHILLGUY/USD": "chill-guy",
103 | "Crypto.CHR/USD": "chromaway",
104 | "Crypto.CHZ/USD": "chiliz",
105 | "Crypto.CLANKER/USD": "tokenbot-2",
106 | "Crypto.CLOUD/USD": "sanctum-2",
107 | "Crypto.COINX/USD": "coinbase-xstock",
108 | "Crypto.COMP/USD": "compound-governance-token",
109 | "Crypto.COOK/USD": "meth-protocol",
110 | "Crypto.COOKIE/USD": "cookie",
111 | "Crypto.COQ/USD": "coq-inu",
112 | "Crypto.CORE/USD": "core-2",
113 | "Crypto.COW/USD": "cow",
114 | "Crypto.CRCLX/USD": "circle-xstock",
115 | "Crypto.CRO/USD": "crypto-com-chain",
116 | "Crypto.CRV/USD": "curve-dao-token",
117 | "Crypto.CSPR/USD": "casper-network",
118 | "Crypto.CTSI/USD": "cartesi",
119 | "Crypto.CVX/USD": "convex-finance",
120 | "Crypto.DAI/USD": "dai",
121 | "Crypto.DASH/USD": "dash",
122 | "Crypto.DBR/USD": "debridge",
123 | "Crypto.DEEP/USD": "deep",
124 | "Crypto.DEGEN/USD": "degen-base",
125 | "Crypto.DEUSD/USD": "elixir-deusd",
126 | "Crypto.DEXE/USD": "dexe",
127 | "Crypto.DMC/USD": "delorean",
128 | "Crypto.DODO/USD": "dodo",
129 | "Crypto.DOGE/USD": "dogecoin",
130 | "Crypto.DOGINME/USD": "doginme",
131 | "Crypto.DOGS/USD": "dogs-2",
132 | "Crypto.DOLO/USD": "dolomite",
133 | "Crypto.DOT/USD": "polkadot",
134 | "Crypto.DRIFT/USD": "drift-protocol",
135 | "Crypto.DSOL/USD": "drift-staked-sol",
136 | "Crypto.DYDX/USD": "dydx-chain",
137 | "Crypto.DYM/USD": "dymension",
138 | "Crypto.EBTC/USD": "ether-fi-staked-btc",
139 | "Crypto.ECHO/USD": "echo-protocol",
140 | "Crypto.EDU/USD": "edu-coin",
141 | "Crypto.EGLD/USD": "elrond-erd-2",
142 | "Crypto.EIGEN/USD": "eigenlayer",
143 | "Crypto.ELIZAOS/USD": "elizaos",
144 | "Crypto.ELON/USD": "dogelon-mars",
145 | "Crypto.ENA/USD": "ethena",
146 | "Crypto.ENJ/USD": "enjincoin",
147 | "Crypto.ENS/USD": "ethereum-name-service",
148 | "Crypto.ES/USD": "eclipse-3",
149 | "Crypto.ETC/USD": "ethereum-classic",
150 | "Crypto.ETH/USD": "ethereum",
151 | "Crypto.ETHFI/USD": "ether-fi",
152 | "Crypto.ETHW/USD": "ethereum-pow-iou",
153 | "Crypto.EUL/USD": "euler",
154 | "Crypto.EURC/USD": "euro-coin",
155 | "Crypto.EURCV/USD": "societe-generale-forge-eurcv",
156 | "Crypto.EVAA/USD": "evaa-protocol",
157 | "Crypto.EZETH/USD": "renzo-restaked-eth",
158 | "Crypto.F/USD": "synfutures",
159 | "Crypto.FAI/USD": "freysa-ai",
160 | "Crypto.FARTCOIN/USD": "fartcoin",
161 | "Crypto.FDIT/USD": "fidelity-digital-interest-token",
162 | "Crypto.FDUSD/USD": "first-digital-usd",
163 | "Crypto.FET/USD": "fetch-ai",
164 | "Crypto.FEUSD/USD": "felix-feusd",
165 | "Crypto.FF/USD": "falcon-finance-ff",
166 | "Crypto.FIDA/USD": "bonfida",
167 | "Crypto.FIL/USD": "filecoin",
168 | "Crypto.FLOKI/USD": "floki",
169 | "Crypto.FLOW/USD": "flow",
170 | "Crypto.FLR/USD": "flare-networks",
171 | "Crypto.FLUID/USD": "instadapp",
172 | "Crypto.FORM/USD": "four",
173 | "Crypto.FOXY/USD": "foxy",
174 | "Crypto.FRAG/USD": "fragmetric",
175 | "Crypto.FRAX/USD": "frax",
176 | "Crypto.FRXETH/USD": "frax-ether",
177 | "Crypto.FRXUSD/USD": "frax-usd",
178 | "Crypto.FTT/USD": "ftx-token",
179 | "Crypto.FUEL/USD": "fuel-network",
180 | "Crypto.FWOG/USD": "fwog",
181 | "Crypto.G/USD": "g-2",
182 | "Crypto.GALA/USD": "gala",
183 | "Crypto.GHO/USD": "gho",
184 | "Crypto.GIGA/USD": "gigachad-2",
185 | "Crypto.GLM/USD": "golem",
186 | "Crypto.GLMR/USD": "moonbeam",
187 | "Crypto.GMT/USD": "stepn",
188 | "Crypto.GMX/USD": "gmx",
189 | "Crypto.GNO/USD": "gnosis",
190 | "Crypto.GNS/USD": "gains-network",
191 | "Crypto.GOAT/USD": "goat",
192 | "Crypto.GOGLZ/USD": "googles",
193 | "Crypto.GOLD/USD": "gold-2",
194 | "Crypto.GOOGLX/USD": "alphabet-xstock",
195 | "Crypto.GORK/USD": "gork",
196 | "Crypto.GP/USD": "graphite-protocol",
197 | "Crypto.GPS/USD": "goplus-security",
198 | "Crypto.GRAIL/USD": "camelot-token",
199 | "Crypto.GRASS/USD": "grass",
200 | "Crypto.GRIFFAIN/USD": "griffain",
201 | "Crypto.GRT/USD": "the-graph",
202 | "Crypto.GT/USD": "gatechain-token",
203 | "Crypto.GUSD/USD": "gemini-dollar",
204 | "Crypto.H/USD": "humanity",
205 | "Crypto.HAEDAL/USD": "haedal",
206 | "Crypto.HASUI/USD": "haedal-staked-sui",
207 | "Crypto.HBAR/USD": "hedera-hashgraph",
208 | "Crypto.HEMI/USD": "hemi",
209 | "Crypto.HFT/USD": "hashflow",
210 | "Crypto.HFUN/USD": "hypurr-fun",
211 | "Crypto.HIPPO/USD": "sudeng",
212 | "Crypto.HNT/USD": "helium",
213 | "Crypto.HOLO/USD": "holoworld",
214 | "Crypto.HONEY/USD": "honey-3",
215 | "Crypto.HOODX/USD": "robinhood-xstock",
216 | "Crypto.HT/USD": "huobi-token",
217 | "Crypto.HUMA/USD": "huma-finance",
218 | "Crypto.HYPE/USD": "hyperliquid",
219 | "Crypto.HYPER/USD": "hyperlane",
220 | "Crypto.HYPERSTABLE.USH/USD": "hyperstable",
221 | "Crypto.IBERA/USD": "infrared-bera",
222 | "Crypto.IBGT/USD": "infrafred-bgt",
223 | "Crypto.ICP/USD": "internet-computer",
224 | "Crypto.ICX/USD": "icon",
225 | "Crypto.IDEX/USD": "aurora-dao",
226 | "Crypto.IKA/USD": "ika",
227 | "Crypto.ILV/USD": "illuvium",
228 | "Crypto.IMX/USD": "immutable-x",
229 | "Crypto.INF/USD": "socean-staked-sol",
230 | "Crypto.INIT/USD": "initia",
231 | "Crypto.INJ/USD": "injective-protocol",
232 | "Crypto.IO/USD": "io",
233 | "Crypto.IOTA/USD": "iota",
234 | "Crypto.IOTX/USD": "iotex",
235 | "Crypto.IP/USD": "story-2",
236 | "Crypto.JASMY/USD": "jasmycoin",
237 | "Crypto.JITOSOL/USD": "jito-staked-sol",
238 | "Crypto.JLP/USD": "jupiter-perpetuals-liquidity-provider-token",
239 | "Crypto.JOE/USD": "joe",
240 | "Crypto.JTO/USD": "jito-governance-token",
241 | "Crypto.JUP/USD": "jupiter",
242 | "Crypto.KAIA/USD": "kaia",
243 | "Crypto.KAITO/USD": "kaito",
244 | "Crypto.KAPT/USD": "kofi-aptos",
245 | "Crypto.KAS/USD": "kaspa",
246 | "Crypto.KAVA/USD": "kava",
247 | "Crypto.KCS/USD": "kucoin-shares",
248 | "Crypto.KERNEL/USD": "kernel-2",
249 | "Crypto.KHYPE/USD": "kinetic-staked-hype",
250 | "Crypto.KMNO/USD": "kamino",
251 | "Crypto.KNC/USD": "kyber-network-crystal",
252 | "Crypto.KNTQ/USD": "kinetiq",
253 | "Crypto.KSM/USD": "kusama",
254 | "Crypto.KTA/USD": "keeta",
255 | "Crypto.LA/USD": "lagrange",
256 | "Crypto.LAYER/USD": "solayer",
257 | "Crypto.LBGT/USD": "liquid-bgt",
258 | "Crypto.LBTC/USD": "lombard-staked-btc",
259 | "Crypto.LDO/USD": "lido-dao",
260 | "Crypto.LEO/USD": "leo-2",
261 | "Crypto.LHYPE/USD": "looped-hype",
262 | "Crypto.LINEA/USD": "linea",
263 | "Crypto.LINK/USD": "chainlink",
264 | "Crypto.LION/USD": "loaded-lions",
265 | "Crypto.LL/USD": "lightlink",
266 | "Crypto.LMTS/USD": "limitless-3",
267 | "Crypto.LOFI/USD": "lofi-2",
268 | "Crypto.LOOKS/USD": "looksrare",
269 | "Crypto.LQTY/USD": "liquity",
270 | "Crypto.LRC/USD": "loopring",
271 | "Crypto.LST/USD": "liquid-staking-token",
272 | "Crypto.LTC/USD": "litecoin",
273 | "Crypto.LUCE/USD": "offcial-mascot-of-the-holy-year",
274 | "Crypto.LUNA/USD": "terra-luna-2",
275 | "Crypto.LUNC/USD": "terra-luna",
276 | "Crypto.LUSD/USD": "liquity-usd",
277 | "Crypto.MAG7-SSI/USD": "mag7-ssi",
278 | "Crypto.MANA/USD": "decentraland",
279 | "Crypto.MANEKI/USD": "maneki",
280 | "Crypto.MANTA/USD": "manta-network",
281 | "Crypto.MASK/USD": "mask-network",
282 | "Crypto.MAV/USD": "maverick-protocol",
283 | "Crypto.MCDX/USD": "mcdonald-s-xstock",
284 | "Crypto.ME/USD": "magic-eden",
285 | "Crypto.MELANIA/USD": "melania-meme",
286 | "Crypto.MEME/USD": "memecoin-2",
287 | "Crypto.MERL/USD": "merlin-chain",
288 | "Crypto.MET/USD": "meteora",
289 | "Crypto.META/USD": "meta-2-2",
290 | "Crypto.METASTABLE.MUSD/USD": "mad-usd",
291 | "Crypto.METAX/USD": "meta-xstock",
292 | "Crypto.METH/USD": "mantle-staked-ether",
293 | "Crypto.METIS/USD": "metis-token",
294 | "Crypto.MEW/USD": "cat-in-a-dogs-world",
295 | "Crypto.MEZO.MUSD/USD": "mezo-usd",
296 | "Crypto.MHYPE/USD": "hyperpie-staked-mhype",
297 | "Crypto.MIM/USD": "magic-internet-money",
298 | "Crypto.MINA/USD": "mina-protocol",
299 | "Crypto.MMT/USD": "momentum-3",
300 | "Crypto.MNDE/USD": "marinade",
301 | "Crypto.MNT/USD": "mantle",
302 | "Crypto.MOBILE/USD": "helium-mobile",
303 | "Crypto.MOBY/USD": "moby",
304 | "Crypto.MODE/USD": "mode",
305 | "Crypto.MOG/USD": "mog",
306 | "Crypto.MON/USD": "monad",
307 | "Crypto.MOODENG/USD": "moo-deng",
308 | "Crypto.MORPHO/USD": "morpho",
309 | "Crypto.MOTHER/USD": "mother-iggy",
310 | "Crypto.MOVE/USD": "movement",
311 | "Crypto.MSETH/USD": "metronome-synth-eth",
312 | "Crypto.MSOL/USD": "msol",
313 | "Crypto.MSTRX/USD": "microstrategy-xstock",
314 | "Crypto.MSUSD/USD": "main-street-usd",
315 | "Crypto.MTRG/USD": "meter",
316 | "Crypto.MUBARAK/USD": "mubarak",
317 | "Crypto.MYRO/USD": "myro",
318 | "Crypto.MYX/USD": "myx-finance",
319 | "Crypto.NAVX/USD": "navi",
320 | "Crypto.NEAR/USD": "near",
321 | "Crypto.NECT/USD": "nectar",
322 | "Crypto.NEIRO/USD": "neiro",
323 | "Crypto.NEON/USD": "neon",
324 | "Crypto.NEXO/USD": "nexo",
325 | "Crypto.NFLXX/USD": "netflix-xstock",
326 | "Crypto.NIL/USD": "nillion",
327 | "Crypto.NOBODY/USD": "nobody-sausage",
328 | "Crypto.NOT/USD": "notcoin",
329 | "Crypto.NS/USD": "nodestats",
330 | "Crypto.NTRN/USD": "neutron-3",
331 | "Crypto.NVDAX/USD": "nvidia-xstock",
332 | "Crypto.NXPC/USD": "nexpace",
333 | "Crypto.ODOS/USD": "odos",
334 | "Crypto.OG/USD": "og-fan-token",
335 | "Crypto.OGN/USD": "origin-protocol",
336 | "Crypto.OHM/USD": "olympus",
337 | "Crypto.OKB/USD": "okb",
338 | "Crypto.OM/USD": "mantra-dao",
339 | "Crypto.OMI/USD": "ecomi",
340 | "Crypto.ONDO/USD": "ondo-finance",
341 | "Crypto.ONE/USD": "harmony",
342 | "Crypto.OP/USD": "optimism",
343 | "Crypto.ORCA/USD": "orca",
344 | "Crypto.ORDER/USD": "order-2",
345 | "Crypto.ORDI/USD": "ordinals",
346 | "Crypto.ORE/USD": "ore",
347 | "Crypto.OS/USD": "origin-staked-s",
348 | "Crypto.OSMO/USD": "osmosis",
349 | "Crypto.OUSDT/USD": "openusdt",
350 | "Crypto.P33/USD": "pharaoh-liquid-staking-token",
351 | "Crypto.PARTI/USD": "particle-network",
352 | "Crypto.PAXG/USD": "pax-gold",
353 | "Crypto.PENDLE/USD": "pendle",
354 | "Crypto.PENGU/USD": "penguiana",
355 | "Crypto.PEOPLE/USD": "constitutiondao",
356 | "Crypto.PEPE/USD": "pepe",
357 | "Crypto.PERP/USD": "perpetual-protocol",
358 | "Crypto.PI/USD": "pi-network",
359 | "Crypto.PLUME/USD": "plume",
360 | "Crypto.PNUT/USD": "peanut-3",
361 | "Crypto.POL/USD": "polygon-ecosystem-token",
362 | "Crypto.PONKE/USD": "ponke",
363 | "Crypto.POPCAT/USD": "popcat",
364 | "Crypto.PRCL/USD": "parcl",
365 | "Crypto.PRIME/USD": "echelon-prime",
366 | "Crypto.PROMPT/USD": "wayfinder",
367 | "Crypto.PROVE/USD": "succinct",
368 | "Crypto.PSG/USD": "paris-saint-germain-fan-token",
369 | "Crypto.PUMP/USD": "pump",
370 | "Crypto.PURR/USD": "purr-2",
371 | "Crypto.PYTH/USD": "pyth-network",
372 | "Crypto.PYUSD/USD": "paypal-usd",
373 | "Crypto.QNT/USD": "quant-network",
374 | "Crypto.QQQX/USD": "nasdaq-xstock",
375 | "Crypto.QTUM/USD": "qtum",
376 | "Crypto.RAY/USD": "raydium",
377 | "Crypto.RDNT/USD": "radiant-capital",
378 | "Crypto.RED/USD": "redstone-oracles",
379 | "Crypto.RENDER/USD": "render-token",
380 | "Crypto.RESOLV/USD": "resolv",
381 | "Crypto.RETARDIO/USD": "retardio",
382 | "Crypto.RETH/USD": "rocket-pool-eth",
383 | "Crypto.REX33/USD": "etherex-liquid-staking-token",
384 | "Crypto.REZ/USD": "renzo",
385 | "Crypto.RHEA/USD": "rhea-2",
386 | "Crypto.RION/USD": "hyperion-2",
387 | "Crypto.RLB/USD": "rollbit-coin",
388 | "Crypto.RLP/USD": "resolv-rlp",
389 | "Crypto.RLUSD/USD": "ripple-usd",
390 | "Crypto.RON/USD": "ronin",
391 | "Crypto.ROSE/USD": "oasis-network",
392 | "Crypto.RPL/USD": "rocket-pool",
393 | "Crypto.RSETH/USD": "kelp-dao-restaked-eth",
394 | "Crypto.RSR/USD": "reserve-rights-token",
395 | "Crypto.RSWETH/USD": "restaked-swell-eth",
396 | "Crypto.RUNE/USD": "thorchain",
397 | "Crypto.S/USD": "sonic-3",
398 | "Crypto.SAFE/USD": "safe",
399 | "Crypto.SAMO/USD": "samoyedcoin",
400 | "Crypto.SAND/USD": "the-sandbox",
401 | "Crypto.SATS/USD": "sats-ordinals",
402 | "Crypto.SCA/USD": "scallop-2",
403 | "Crypto.SCETH/USD": "rings-sc-eth",
404 | "Crypto.SCR/USD": "scroll",
405 | "Crypto.SCRT/USD": "secret",
406 | "Crypto.SCUSD/USD": "rings-scusd",
407 | "Crypto.SD/USD": "stader",
408 | "Crypto.SDAI/USD": "savings-dai",
409 | "Crypto.SEDA/USD": "seda-2",
410 | "Crypto.SEI/USD": "sei-network",
411 | "Crypto.SEND/USD": "suilend",
412 | "Crypto.SFRXETH/USD": "staked-frax-ether",
413 | "Crypto.SHADOW/USD": "shadow-2",
414 | "Crypto.SHIB/USD": "shiba-inu",
415 | "Crypto.SIGN/USD": "sign-global",
416 | "Crypto.SKATE/USD": "skate",
417 | "Crypto.SKI/USD": "ski-mask-dog",
418 | "Crypto.SKL/USD": "skale",
419 | "Crypto.SKY/USD": "sky",
420 | "Crypto.SLP/USD": "smooth-love-potion",
421 | "Crypto.SNX/USD": "havven",
422 | "Crypto.SOL/USD": "solana",
423 | "Crypto.SOLV/USD": "solv-protocol",
424 | "Crypto.SOLVBTC/USD": "solv-btc",
425 | "Crypto.SONIC/USD": "sonic-2",
426 | "Crypto.SOON/USD": "soon-2",
427 | "Crypto.SOPH/USD": "sophon",
428 | "Crypto.SPELL/USD": "spell-token",
429 | "Crypto.SPK/USD": "spark-2",
430 | "Crypto.SPX6900/USD": "based-spx6900",
431 | "Crypto.SPYX/USD": "sp500-xstock",
432 | "Crypto.STBL/USD": "stbl",
433 | "Crypto.STETH/USD": "staked-ether",
434 | "Crypto.STG/USD": "stargate-finance",
435 | "Crypto.STHYPE/USD": "staked-hype",
436 | "Crypto.STONE/USD": "stakestone-ether",
437 | "Crypto.STORJ/USD": "storj",
438 | "Crypto.STREAM/USD": "streamflow",
439 | "Crypto.STRK/USD": "starknet",
440 | "Crypto.STS/USD": "beets-staked-sonic",
441 | "Crypto.STSUI/USD": "alphafi-staked-sui",
442 | "Crypto.STX/USD": "blockstack",
443 | "Crypto.SUI/USD": "sui",
444 | "Crypto.SUN/USD": "sun-token",
445 | "Crypto.SUSDE/USD": "ethena-staked-usde",
446 | "Crypto.SUSHI/USD": "sushi",
447 | "Crypto.SWARMS/USD": "swarms",
448 | "Crypto.SWETH/USD": "sweth",
449 | "Crypto.SXP/USD": "swipe",
450 | "Crypto.SYN/USD": "synapse-2",
451 | "Crypto.SYRUP/USD": "syrup",
452 | "Crypto.TAC/USD": "tac",
453 | "Crypto.TAIKO/USD": "taiko",
454 | "Crypto.TAO/USD": "bittensor",
455 | "Crypto.TBTC/USD": "tbtc",
456 | "Crypto.THAPT/USD": "thala-apt",
457 | "Crypto.THE/USD": "thena",
458 | "Crypto.THETA/USD": "theta-token",
459 | "Crypto.THL/USD": "thala",
460 | "Crypto.TIA/USD": "celestia",
461 | "Crypto.TNSR/USD": "tensor",
462 | "Crypto.TOKEN/USD": "tokenfi",
463 | "Crypto.TON/USD": "the-open-network",
464 | "Crypto.TOSHI/USD": "toshi",
465 | "Crypto.TRB/USD": "tellor",
466 | "Crypto.TRUMP/USD": "official-trump",
467 | "Crypto.TRX/USD": "tron",
468 | "Crypto.TSLAX/USD": "tesla-xstock",
469 | "Crypto.TST/USD": "test-3",
470 | "Crypto.TURBO/USD": "turbo",
471 | "Crypto.TURBOS/USD": "turbos-finance",
472 | "Crypto.TUSD/USD": "true-usd",
473 | "Crypto.TUT/USD": "tutorial",
474 | "Crypto.TWT/USD": "trust-wallet-token",
475 | "Crypto.UBTC/USD": "unit-bitcoin",
476 | "Crypto.UETH/USD": "unit-ethereum",
477 | "Crypto.UFART/USD": "unit-fartcoin",
478 | "Crypto.UMA/USD": "uma",
479 | "Crypto.UNI/USD": "uniswap",
480 | "Crypto.UP/USD": "doubleup",
481 | "Crypto.URANUS/USD": "uranus-2",
482 | "Crypto.USD0++/USD": "usd0-liquid-bond",
483 | "Crypto.USD0/USD": "usual-usd",
484 | "Crypto.USDA/USD": "auro-usda",
485 | "Crypto.USDAF/USD": "asymmetry-usdaf-2",
486 | "Crypto.USDAI/USD": "usdai",
487 | "Crypto.USDB/USD": "bucket-usd",
488 | "Crypto.USDC/USD": "usd-coin",
489 | "Crypto.USDD/USD": "usdd",
490 | "Crypto.USDE/USD": "ethena-usde",
491 | "Crypto.USDF/USD": "falcon-finance",
492 | "Crypto.USDG/USD": "global-dollar",
493 | "Crypto.USDH/USD": "hermetica-usdh",
494 | "Crypto.USDHL/USD": "hyper-usd",
495 | "Crypto.USDL/USD": "lift-dollar",
496 | "Crypto.USDN/USD": "smardex-usdn",
497 | "Crypto.USDP/USD": "paxos-standard",
498 | "Crypto.USDS/USD": "usds",
499 | "Crypto.USDT/USD": "tether",
500 | "Crypto.USDT0/USD": "usdt0",
501 | "Crypto.USDTB/USD": "usdtb",
502 | "Crypto.USDU/USD": "uncap-usd",
503 | "Crypto.USDXL/USD": "last-usd",
504 | "Crypto.USDY/USD": "ondo-us-dollar-yield",
505 | "Crypto.USELESS/USD": "useless-3",
506 | "Crypto.USOL/USD": "unit-solana",
507 | "Crypto.USR/USD": "resolv-usr",
508 | "Crypto.USTC/USD": "terrausd",
509 | "Crypto.USUAL/USD": "usual",
510 | "Crypto.USX/USD": "token-dforce-usd",
511 | "Crypto.VANA/USD": "vana",
512 | "Crypto.VELODROME.VELO/USD": "velo",
513 | "Crypto.VET/USD": "vechain",
514 | "Crypto.VIC/USD": "tomochain",
515 | "Crypto.VINE/USD": "vine",
516 | "Crypto.VIRTUAL/USD": "virtual-protocol",
517 | "Crypto.VSUI/USD": "volo-staked-sui",
518 | "Crypto.VVV/USD": "venice-token",
519 | "Crypto.W/USD": "w",
520 | "Crypto.WAGMI/USD": "wagmi-2",
521 | "Crypto.WAL/USD": "walrus-2",
522 | "Crypto.WAVES/USD": "waves",
523 | "Crypto.WBETH/USD": "wrapped-beacon-eth",
524 | "Crypto.WBTC/USD": "wrapped-bitcoin",
525 | "Crypto.WCT/USD": "connect-token-wct",
526 | "Crypto.WEETH/USD": "wrapped-eeth",
527 | "Crypto.WELL/USD": "moonwell-artemis",
528 | "Crypto.WEN/USD": "wen-4",
529 | "Crypto.WETH/USD": "weth",
530 | "Crypto.WFRAGSOL/USD": "wrapped-fragsol",
531 | "Crypto.WIF/USD": "dogwifcoin",
532 | "Crypto.WLD/USD": "worldcoin-wld",
533 | "Crypto.WLFI/USD": "world-liberty-financial",
534 | "Crypto.WOJAK/USD": "wojak",
535 | "Crypto.WOM/USD": "wombat-exchange",
536 | "Crypto.WOO/USD": "woo-network",
537 | "Crypto.WSTETH/USD": "wrapped-steth",
538 | "Crypto.XAI/USD": "xai-blockchain",
539 | "Crypto.XAUT/USD": "tether-gold",
540 | "Crypto.XBTC/USD": "okx-wrapped-btc",
541 | "Crypto.XDC/USD": "xdce-crowd-sale",
542 | "Crypto.XEC/USD": "ecash",
543 | "Crypto.XION/USD": "xion-2",
544 | "Crypto.XLM/USD": "stellar",
545 | "Crypto.XMR/USD": "monero",
546 | "Crypto.XPL/USD": "plasma",
547 | "Crypto.XPRT/USD": "persistence",
548 | "Crypto.XRD/USD": "radix",
549 | "Crypto.XRP/USD": "ripple",
550 | "Crypto.XSGD/USD": "xsgd",
551 | "Crypto.XTZ/USD": "tezos",
552 | "Crypto.YFI/USD": "yearn-finance",
553 | "Crypto.YU/USD": "yu",
554 | "Crypto.YZY/USD": "swasticoin",
555 | "Crypto.ZBTC/USD": "zeus-netwok-zbtc",
556 | "Crypto.ZEC/USD": "zcash",
557 | "Crypto.ZEN/USD": "zencash",
558 | "Crypto.ZEREBRO/USD": "zerebro",
559 | "Crypto.ZETA/USD": "zetachain",
560 | "Crypto.ZEUS/USD": "zeus-2",
561 | "Crypto.ZEX/USD": "zeta",
562 | "Crypto.ZIL/USD": "zilliqa",
563 | "Crypto.ZK/USD": "zksync",
564 | "Crypto.ZORA/USD": "zora",
565 | "Crypto.ZRO/USD": "layerzero"
566 | }
--------------------------------------------------------------------------------
/scripts/build_coingecko_mapping.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Script to build CoinGecko mapping file from Pyth Hermes API and CoinGecko API.
4 |
5 | This script:
6 | 1. Fetches all price feeds from Pyth Hermes API
7 | 2. Extracts base symbols (especially for Crypto assets)
8 | 3. Gets CoinGecko coin list
9 | 4. Matches using Pyth description (most reliable) and symbol matching
10 | 5. Generates the mapping file with warnings for non-100% matches
11 | """
12 |
13 | import json
14 | import sys
15 | import time
16 | from difflib import SequenceMatcher
17 | from typing import Any, Dict, List, Optional, Tuple
18 |
19 | import requests
20 | from loguru import logger
21 | from pycoingecko import CoinGeckoAPI
22 |
# Configure logger: timestamped plain-text output to stderr at INFO level.
logger.remove()
logger.add(
    sys.stderr,
    level="INFO",
    format="{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {message}",
)

# Hermes endpoint listing all Pyth price feeds; get_hermes_prices() derives the
# latest-price endpoint by rewriting the path of this same base URL.
HERMES_API_URL = "https://hermes.pyth.network/v2/price_feeds"
# Shared unauthenticated CoinGecko client (public rate limits apply).
COINGECKO_API = CoinGeckoAPI()

# Known mappings for validation only (not used in matching logic)
# Format: Pyth symbol -> CoinGecko ID
# validate_known_mappings() compares the generated mapping against this table;
# any disagreement aborts the build, so these entries act as regression anchors
# for the matching heuristics.
KNOWN_MAPPINGS = {
    "Crypto.BTC/USD": "bitcoin",
    "Crypto.ETH/USD": "ethereum",
    "Crypto.USDT/USD": "tether",
    "Crypto.USDC/USD": "usd-coin",
    "Crypto.BNB/USD": "binancecoin",
    "Crypto.SOL/USD": "solana",
    "Crypto.XRP/USD": "ripple",
    "Crypto.DOGE/USD": "dogecoin",
    "Crypto.ADA/USD": "cardano",
    "Crypto.AVAX/USD": "avalanche-2",
    "Crypto.DOT/USD": "polkadot",
    "Crypto.MATIC/USD": "matic-network",
    "Crypto.LINK/USD": "chainlink",
    "Crypto.UNI/USD": "uniswap",
    "Crypto.ATOM/USD": "cosmos",
    "Crypto.LTC/USD": "litecoin",
    "Crypto.BCH/USD": "bitcoin-cash",
    "Crypto.XLM/USD": "stellar",
    "Crypto.ALGO/USD": "algorand",
    "Crypto.VET/USD": "vechain",
    "Crypto.ICP/USD": "internet-computer",
    "Crypto.FIL/USD": "filecoin",
    "Crypto.TRX/USD": "tron",
    "Crypto.ETC/USD": "ethereum-classic",
    "Crypto.EOS/USD": "eos",
    "Crypto.AAVE/USD": "aave",
    "Crypto.MKR/USD": "maker",
    "Crypto.COMP/USD": "compound-governance-token",
    "Crypto.YFI/USD": "yearn-finance",
    "Crypto.SNX/USD": "havven",
    "Crypto.SUSHI/USD": "sushi",
    "Crypto.CRV/USD": "curve-dao-token",
    "Crypto.1INCH/USD": "1inch",
    "Crypto.ENJ/USD": "enjincoin",
    "Crypto.BAT/USD": "basic-attention-token",
    "Crypto.ZRX/USD": "0x",
    "Crypto.MANA/USD": "decentraland",
    "Crypto.SAND/USD": "the-sandbox",
    "Crypto.GALA/USD": "gala",
    "Crypto.AXS/USD": "axie-infinity",
    "Crypto.CHZ/USD": "chiliz",
    "Crypto.FLOW/USD": "flow",
    "Crypto.NEAR/USD": "near",
    "Crypto.FTM/USD": "fantom",
    "Crypto.HBAR/USD": "hedera-hashgraph",
    "Crypto.EGLD/USD": "elrond-erd-2",
    "Crypto.THETA/USD": "theta-token",
    "Crypto.ZIL/USD": "zilliqa",
    "Crypto.IOTA/USD": "iota",
    "Crypto.ONE/USD": "harmony",
    "Crypto.WAVES/USD": "waves",
    "Crypto.XTZ/USD": "tezos",
    "Crypto.DASH/USD": "dash",
    "Crypto.ZEC/USD": "zcash",
    "Crypto.XMR/USD": "monero",
    "Crypto.ANC/USD": "anchor-protocol",
    "Crypto.APE/USD": "apecoin",
    "Crypto.ATLAS/USD": "star-atlas",
    "Crypto.AUST/USD": "anchorust",
    "Crypto.BETH/USD": "binance-eth",
    "Crypto.BRZ/USD": "brz",
    "Crypto.BUSD/USD": "binance-usd",
    "Crypto.C98/USD": "coin98",
    "Crypto.COPE/USD": "cope",
    "Crypto.CUSD/USD": "celo-dollar",
    "Crypto.FIDA/USD": "bonfida",
    "Crypto.FTT/USD": "ftx-token",
    "Crypto.GMT/USD": "stepn",
    "Crypto.GOFX/USD": "goosefx",
    "Crypto.HXRO/USD": "hxro",
    "Crypto.INJ/USD": "injective-protocol",
    "Crypto.JET/USD": "jet",
    "Crypto.LUNA/USD": "terra-luna-2",
    "Crypto.LUNC/USD": "terra-luna",
    "Crypto.MER/USD": "mercurial",
    "Crypto.MIR/USD": "mirror-protocol",
    "Crypto.MNGO/USD": "mango-markets",
    "Crypto.MSOL/USD": "msol",
    "Crypto.ORCA/USD": "orca",
    "Crypto.PAI/USD": "parrot-usd",
    "Crypto.PORT/USD": "port-finance",
    "Crypto.RAY/USD": "raydium",
    "Crypto.SBR/USD": "saber",
    "Crypto.SCNSOL/USD": "socean-staked-sol",
    "Crypto.SLND/USD": "solend",
    "Crypto.SNY/USD": "synthetify-token",
    "Crypto.SRM/USD": "serum",
    "Crypto.STEP/USD": "step-finance",
    "Crypto.STSOL/USD": "lido-staked-sol",
    "Crypto.TUSD/USD": "true-usd",
    "Crypto.USTC/USD": "terrausd",
    "Crypto.VAI/USD": "vai",
    "Crypto.XVS/USD": "venus",
    "Crypto.ZBC/USD": "zebec-protocol",
}
132 |
133 |
def normalize_symbol(symbol: str) -> str:
    """Upper-case *symbol* and strip one trailing quote suffix (e.g. "/USD").

    Only suffixes introduced by "/" or "-" are removed; a bare ticker such as
    "USDT" is left untouched.
    """
    cleaned = symbol.strip().upper()
    known_suffixes = ("-USD", "/USD", "-USDT", "/USDT", "-USDC", "/USDC")
    hit = next((s for s in known_suffixes if cleaned.endswith(s)), None)
    if hit is None:
        return cleaned
    return cleaned.removesuffix(hit).strip()
142 |
143 |
def is_non_canonical(coin_id: str, coin_name: str) -> bool:
    """Return True when the coin looks bridged/pegged/wrapped (non-canonical).

    Checks a fixed set of red-flag terms against the concatenated id and name.
    """
    markers = ("bridged", "peg", "wrapped", "wormhole", "binance-peg", "mapped-")
    haystack = f"{coin_id} {coin_name}".lower()
    for marker in markers:
        if marker in haystack:
            return True
    return False
151 |
152 |
def normalize_text(text: str) -> str:
    """Lower-case *text* and delete separator characters ("-", "_", " ", "/")."""
    # str.translate deletes every separator in a single C-level pass.
    drop_separators = str.maketrans("", "", "-_ /")
    return text.lower().translate(drop_separators)
158 |
159 |
def match_by_description(
    description: str, coins: List[Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
    """
    Match coin using Pyth description (most reliable method).
    Description format: "COIN_NAME / US DOLLAR" or similar.

    Returns the best-matching coin record from *coins*, or None when the
    description is empty or nothing matches. Canonical coins always win over
    bridged/wrapped ones; an exact canonical id match returns immediately.
    """
    if not description:
        return None

    # Extract words from description (e.g., "UNISWAP / US DOLLAR" -> ["uniswap"])
    # Get the first significant word (usually the coin name)
    desc_parts = description.upper().split("/")[0].strip()  # Get part before "/"
    desc_words = [
        w.replace("-", "").replace("_", "").lower()
        for w in desc_parts.replace("-", " ").split()
        if len(w) > 2 and w.lower() not in ["usd", "us", "dollar", "euro", "eur", "and"]
    ]

    # Also create combined version for multi-word matches (e.g., "USD COIN" -> "usdcoin")
    desc_combined = "".join(desc_words)

    # First pass: find exact matches (prefer canonical)
    canonical_matches = []
    non_canonical_matches = []

    # NOTE: a single coin can be appended to a bucket more than once (it may
    # pass several of the checks below); this is harmless because only the
    # first element of each bucket is ever returned.
    for coin in coins:
        coin_id_norm = normalize_text(coin["id"])
        coin_name_norm = normalize_text(coin["name"])
        is_non_can = is_non_canonical(coin["id"], coin["name"])

        # Check exact word match with coin ID (most reliable)
        for word in desc_words:
            if word == coin_id_norm:
                if is_non_can:
                    non_canonical_matches.append(coin)
                else:
                    # Return immediately for canonical exact match
                    return coin

        # Check combined description match
        if desc_combined == coin_id_norm:
            if is_non_can:
                non_canonical_matches.append(coin)
            else:
                canonical_matches.append(coin)

        # Check if coin name matches
        if coin_name_norm in desc_combined or any(
            word == coin_name_norm for word in desc_words
        ):
            if is_non_can:
                non_canonical_matches.append(coin)
            else:
                canonical_matches.append(coin)

    # Return first canonical match, or first non-canonical if no canonical found
    if canonical_matches:
        return canonical_matches[0]
    if non_canonical_matches:
        return non_canonical_matches[0]

    return None
223 |
224 |
def score_coin(symbol: str, coin: Dict[str, Any], description: str = "") -> float:
    """Heuristic match score for *coin* against *symbol*; higher is better.

    Bridged/wrapped/pegged listings are heavily penalized, as are listings
    whose display name is literally the ticker. A description hit on the coin
    id (or name) short-circuits with a fixed bonus; otherwise the base score
    is scaled by name similarity.
    """
    cg_id = coin["id"].lower()
    cg_name = coin["name"].lower()
    sym = symbol.lower()

    base = 0.1 if is_non_canonical(cg_id, cg_name) else 1.0
    if cg_name == sym:
        # Generic listing whose name is just the ticker - weak evidence.
        base *= 0.3

    if description:
        desc_norm = normalize_text(description)
        if normalize_text(cg_id) in desc_norm:
            return base + 0.5
        if normalize_text(cg_name) in desc_norm:
            return base + 0.3

    similarity = SequenceMatcher(None, sym, cg_name).ratio()
    return base * (0.5 + 0.5 * similarity)
254 |
255 |
def find_coingecko_match(
    pyth_base: str,
    coin_lookup: Dict[str, Any],
    description: str = "",
) -> Tuple[Optional[str], float, str]:
    """Find the best CoinGecko match for a Pyth base symbol.

    Args:
        pyth_base: Base asset symbol from the Pyth feed (e.g. "BTC").
        coin_lookup: Index dict produced by get_coingecko_coin_list().
        description: Optional Pyth feed description used to disambiguate
            colliding tickers.

    Returns:
        Tuple of (coin_id, confidence_score, match_type)

    NOTE(review): every Strategy-1 path reports confidence 1.0 - including the
    heuristic score-based tie-break over colliding tickers - so downstream
    low-confidence filtering only ever applies to Strategy-2 fuzzy matches.
    Confirm this is intentional.
    """
    normalized = normalize_symbol(pyth_base)

    # Strategy 1: Exact symbol match
    if normalized in coin_lookup["by_symbol"]:
        coins = coin_lookup["by_symbol"][normalized]

        if len(coins) == 1:
            return coins[0]["id"], 1.0, "exact_symbol"

        # Try description matching first (most reliable)
        desc_match = match_by_description(description, coins)
        if desc_match:
            return desc_match["id"], 1.0, "exact_symbol"

        # Score all coins and pick best
        best_coin = None
        best_score = -1.0

        for coin in coins:
            score = score_coin(normalized, coin, description)
            if score > best_score:
                best_score = score
                best_coin = coin

        if best_coin:
            return best_coin["id"], 1.0, "exact_symbol"

    # Strategy 2: Fuzzy match on symbol and coin ID
    best_coin = None
    best_score = 0.0

    for coin in coin_lookup["all_coins"]:
        coin_symbol = coin["symbol"].upper()
        coin_id = coin["id"].upper()

        # Check exact match with coin ID first (most reliable)
        if normalized == coin_id:
            return coin["id"], 1.0, "fuzzy_symbol"

        # Check similarity with both symbol and ID, prefer ID matches
        symbol_score = SequenceMatcher(None, normalized, coin_symbol).ratio()
        id_score = SequenceMatcher(None, normalized, coin_id).ratio()

        # Use the better of the two scores, with slight preference for ID matches
        score = max(symbol_score, id_score * 1.01)  # 1% bonus for ID matches

        # 0.7 is the minimum similarity accepted as a fuzzy candidate.
        if score > best_score and score >= 0.7:
            best_score = score
            best_coin = coin

    if best_coin:
        # If we found an exact ID match, return 100% confidence
        if normalized == best_coin["id"].upper():
            return best_coin["id"], 1.0, "fuzzy_symbol"
        return best_coin["id"], best_score, "fuzzy_symbol"

    return None, 0.0, "no_match"
323 |
324 |
def validate_known_mappings(
    mapping: Dict[str, str], coin_lookup: Dict[str, Any]
) -> Tuple[bool, List[str]]:
    """Check the generated *mapping* against the hand-curated KNOWN_MAPPINGS.

    Returns (is_valid, errors): one error line per known symbol whose generated
    CoinGecko id disagrees with the expected id. Known symbols absent from the
    mapping are ignored.
    """
    errors: List[str] = []
    by_id = coin_lookup["by_id"]

    for symbol, expected_id in KNOWN_MAPPINGS.items():
        if symbol not in mapping:
            continue
        actual_id = mapping[symbol]
        if actual_id == expected_id:
            continue

        # Resolve display names for a friendlier error message.
        expected_name = by_id.get(expected_id, {}).get("name", expected_id)
        actual_name = by_id.get(actual_id, {}).get("name", actual_id)

        errors.append(
            f"❌ VALIDATION FAILED: {symbol} mapped to '{actual_id}' ({actual_name}) "
            f"but expected '{expected_id}' ({expected_name})"
        )

    return not errors, errors
346 |
347 |
def get_pyth_price_feeds() -> List[Dict[str, Any]]:
    """Download the full list of price feeds from the Pyth Hermes API.

    Any failure (network, HTTP error, bad JSON) is fatal: nothing downstream
    can run without the feed list, so the process exits with code 1.
    """
    logger.info(f"Fetching price feeds from {HERMES_API_URL}...")
    try:
        resp = requests.get(HERMES_API_URL, timeout=30)
        resp.raise_for_status()
        feeds = resp.json()
        logger.info(f"Fetched {len(feeds)} price feeds from Hermes API")
        time.sleep(1)  # Rate limit protection
        return feeds
    except Exception as exc:
        logger.error(f"Failed to fetch price feeds from Hermes API: {exc}")
        sys.exit(1)
361 |
362 |
def get_coingecko_coin_list() -> Dict[str, Any]:
    """Fetch the CoinGecko coin list and index it for fast lookups.

    Returns a dict with three views of the same data:
      by_id:     coin id -> coin record
      by_symbol: upper-cased ticker -> list of coin records (tickers collide)
      all_coins: the raw list as returned by CoinGecko

    Exits the process with code 1 on any failure (the list is mandatory).
    """
    logger.info("Fetching CoinGecko coin list...")
    try:
        coins = COINGECKO_API.get_coins_list()
        logger.info(f"Fetched {len(coins)} coins from CoinGecko")
        time.sleep(1)  # Rate limit protection

        by_id = {coin["id"]: coin for coin in coins}
        by_symbol: Dict[str, List[Dict[str, Any]]] = {}
        for coin in coins:
            # Tickers are not unique on CoinGecko, so bucket records per ticker.
            by_symbol.setdefault(coin["symbol"].upper(), []).append(coin)

        return {"by_id": by_id, "by_symbol": by_symbol, "all_coins": coins}
    except Exception as exc:
        logger.error(f"Failed to fetch CoinGecko coin list: {exc}")
        sys.exit(1)
383 |
384 |
def get_hermes_prices(symbol_to_feed_id: Dict[str, str]) -> Dict[str, float]:
    """Get latest prices from Hermes API for mapped symbols.

    Fetches the latest price update for every feed id in batches of 50 and
    returns {feed_id: price}. Best-effort: on any request failure the partial
    results gathered so far are returned and a warning is logged.
    """
    logger.info("Fetching prices from Hermes API...")
    hermes_prices = {}

    try:
        # Get price updates for all feeds in batches
        feed_ids = list(symbol_to_feed_id.values())
        batch_size = 50

        for i in range(0, len(feed_ids), batch_size):
            batch = feed_ids[i : i + batch_size]
            query_string = "?" + "&".join(f"ids[]={feed_id}" for feed_id in batch)
            # Reuse the configured base URL, swapping in the latest-price path.
            url = f"{HERMES_API_URL.replace('/price_feeds', '/updates/price/latest')}{query_string}"

            response = requests.get(url, timeout=30)
            response.raise_for_status()
            data = response.json()

            for feed_data in data.get("parsed", []):
                feed_id = feed_data.get("id")
                price_info = feed_data.get("price", {})
                if price_info:
                    # Hermes encodes prices as an integer plus a base-10 exponent.
                    price_str = price_info.get("price", "0")
                    expo = price_info.get("expo", 0)
                    try:
                        price = int(price_str)
                        # Convert to actual price: price * 10^expo
                        actual_price = price * (10**expo)
                        if actual_price > 0:
                            hermes_prices[feed_id] = actual_price
                    except (ValueError, TypeError):
                        # Malformed price payload - skip this feed.
                        continue

            time.sleep(1)  # Rate limit protection - wait 1s after each batch

        logger.info(f"Fetched {len(hermes_prices)} prices from Hermes API")
    except Exception as e:
        # Best-effort: price validation can proceed with whatever was fetched.
        logger.warning(f"Failed to fetch Hermes prices: {e}")

    return hermes_prices
426 |
427 |
def get_coingecko_prices(mapping: Dict[str, str]) -> Dict[str, float]:
    """Get prices from CoinGecko for mapped coins.

    Queries the simple-price endpoint in batches and returns
    {coin_id: usd_price}. Best-effort: on failure the partial results gathered
    so far are returned and a warning is logged.
    """
    logger.info("Fetching prices from CoinGecko...")
    coingecko_prices = {}

    try:
        # Get unique coin IDs
        coin_ids = list(set(mapping.values()))

        # CoinGecko API can handle up to ~1000 IDs at once, but let's batch to be safe
        batch_size = 200
        for i in range(0, len(coin_ids), batch_size):
            batch = coin_ids[i : i + batch_size]
            prices = COINGECKO_API.get_price(
                ids=batch, vs_currencies="usd", include_last_updated_at=False
            )

            for coin_id, price_data in prices.items():
                if "usd" in price_data:
                    coingecko_prices[coin_id] = price_data["usd"]

            time.sleep(2)  # Rate limit protection - wait 2s after each batch

        logger.info(f"Fetched {len(coingecko_prices)} prices from CoinGecko")
    except Exception as e:
        # Best-effort: validation proceeds with whatever prices were fetched.
        logger.warning(f"Failed to fetch CoinGecko prices: {e}")

    return coingecko_prices
456 |
457 |
def validate_prices(
    mapping: Dict[str, str],
    pyth_feeds: List[Dict[str, Any]],
    max_deviation_percent: float = 10.0,
) -> Tuple[List[str], List[str], Dict[str, Dict[str, float]]]:
    """
    Validate prices by comparing Hermes and CoinGecko prices.
    Returns tuple of (warnings, price_mismatch_symbols, price_details) for significant price differences.
    price_details is a dict mapping symbol to {'hermes_price': float, 'coingecko_price': float, 'deviation': float}

    A symbol is flagged only when both sources returned a price, the deviation
    exceeds *max_deviation_percent*, and the CoinGecko price is at least $0.01
    (sub-cent prices make percentage deviation too noisy to be meaningful).
    """
    warnings = []
    price_mismatch_symbols = []
    price_details = {}

    # Map symbols to feed IDs
    symbol_to_feed_id = {}
    for feed in pyth_feeds:
        attrs = feed.get("attributes", {})
        if attrs.get("asset_type") == "Crypto":
            symbol = attrs.get("symbol", "")
            if symbol and symbol in mapping:
                symbol_to_feed_id[symbol] = feed.get("id")

    # Nothing to compare - skip the network round-trips entirely.
    if not symbol_to_feed_id:
        return warnings, price_mismatch_symbols, price_details

    # Get prices from both sources
    hermes_prices = get_hermes_prices(symbol_to_feed_id)
    coingecko_prices = get_coingecko_prices(mapping)

    # Compare prices
    compared = 0
    mismatches = 0
    for symbol, coin_id in mapping.items():
        feed_id = symbol_to_feed_id.get(symbol)

        if feed_id is None:
            continue

        hermes_price = hermes_prices.get(feed_id)
        cg_price = coingecko_prices.get(coin_id)

        # Skip if either price is missing
        if not hermes_price or not cg_price:
            continue

        # Skip if CoinGecko price is 0 (coin might not be actively traded)
        if cg_price <= 0:
            continue

        compared += 1
        # Relative deviation, in percent, using CoinGecko as the reference.
        deviation = abs(hermes_price - cg_price) / cg_price * 100

        # Only warn if deviation is significant and price is meaningful
        if deviation > max_deviation_percent and cg_price >= 0.01:
            mismatches += 1
            warnings.append(
                f"⚠️ {symbol} ({coin_id}): Price mismatch - Hermes: ${hermes_price:,.5f}, "
                f"CoinGecko: ${cg_price:,.5f} (deviation: {deviation:.2f}%)"
            )
            price_mismatch_symbols.append(symbol)
            price_details[symbol] = {
                "hermes_price": hermes_price,
                "coingecko_price": cg_price,
                "deviation": deviation,
            }
            logger.warning(
                f"  Price mismatch: {symbol} ({coin_id}) - "
                f"Hermes: ${hermes_price:,.5f} | CoinGecko: ${cg_price:,.5f} | "
                f"Deviation: {deviation:.2f}%"
            )

    if compared > 0:
        logger.info(f"Compared prices for {compared} symbols")
        if mismatches > 0:
            logger.warning(
                f"Found {mismatches} price mismatches (deviation > {max_deviation_percent}%)"
            )

    return warnings, price_mismatch_symbols, price_details
538 |
539 |
def build_mapping(
    validate_prices_flag: bool = False, max_deviation: float = 10.0
) -> Tuple[
    Dict[str, str], Dict[str, float], List[str], List[str], Dict[str, Dict[str, float]]
]:
    """Build the CoinGecko mapping from Pyth feeds.

    Args:
        validate_prices_flag: When True, cross-check Hermes vs CoinGecko prices.
        max_deviation: Price deviation percentage above which a price mismatch
            is flagged.

    Returns:
        Tuple of (mapping, confidence_scores, warnings, price_mismatch_symbols, price_details)
        price_details maps symbol to {'hermes_price': float, 'coingecko_price': float, 'deviation': float}
    """
    pyth_feeds = get_pyth_price_feeds()
    coin_lookup = get_coingecko_coin_list()

    # Extract Crypto symbols with base and descriptions
    crypto_data = {}
    for feed in pyth_feeds:
        attrs = feed.get("attributes", {})
        if attrs.get("asset_type") != "Crypto":
            continue

        symbol = attrs.get("symbol", "")
        base = attrs.get("base", "")
        quote_currency = attrs.get("quote_currency", "")

        # Only USD-quoted feeds are mapped (CoinGecko prices are fetched in USD).
        if quote_currency != "USD":
            continue

        # Keep the first occurrence of each symbol.
        if symbol and base and symbol not in crypto_data:
            crypto_data[symbol] = {
                "base": base,
                "description": attrs.get("description", ""),
            }

    logger.info(f"Found {len(crypto_data)} unique Crypto symbols quoted in USD")

    # Build mapping
    mapping = {}
    confidence_scores = {}  # Track confidence scores for each symbol
    warnings = []

    for symbol in sorted(crypto_data.keys()):
        base = crypto_data[symbol]["base"]
        description = crypto_data[symbol]["description"]
        api_id, score, match_type = find_coingecko_match(base, coin_lookup, description)

        if api_id:
            mapping[symbol] = api_id
            confidence_scores[symbol] = score
            if score < 1.0:
                warnings.append(
                    f"⚠️ {symbol}: Match confidence {score:.2%} ({match_type}) - "
                    f"matched to '{api_id}'"
                )
        else:
            warnings.append(f"❌ {symbol}: No match found in CoinGecko")

    # Validate against known mappings
    is_valid, validation_errors = validate_known_mappings(mapping, coin_lookup)
    if not is_valid:
        logger.error("\n" + "=" * 60)
        logger.error(
            "VALIDATION FAILED: Known mappings do not match generated mappings!"
        )
        logger.error("=" * 60)
        for error in validation_errors:
            logger.error(error)
        logger.error("\nThis indicates the matching algorithm needs improvement.")
        logger.error(
            "Please fix the matching logic before using the generated mapping."
        )
        # BUG FIX: this branch previously returned a 4-tuple, which crashed the
        # 5-name unpacking in main() exactly when validation failed. Return the
        # full 5-tuple (empty mismatch list and empty price details).
        return mapping, confidence_scores, warnings + validation_errors, [], {}

    logger.info("✓ Validation passed: All known mappings match generated mappings")

    # Validate prices if requested
    price_mismatch_symbols = []
    price_details = {}
    if validate_prices_flag:
        price_warnings, price_mismatch_symbols, price_details = validate_prices(
            mapping, pyth_feeds, max_deviation
        )
        warnings.extend(price_warnings)

    return mapping, confidence_scores, warnings, price_mismatch_symbols, price_details
624 |
625 |
def load_existing_mapping(file_path: str) -> Dict[str, str]:
    """Load an existing mapping file, tolerating the legacy value format.

    Supports both the current format ({symbol: coingecko_id}) and the legacy
    format ({symbol: {"api": ..., "market": ...}}), which is converted on load.

    Returns:
        The mapping, or {} when the file is missing, unparsable, or its
        content is not a JSON object.
    """
    try:
        with open(file_path, "r") as f:
            content = f.read().strip()
        if content.startswith("{"):
            data = json.loads(content)
            # Handle both old format (dict values) and new format (string values)
            if data and isinstance(list(data.values())[0], dict):
                # Convert old format to new format
                return {
                    k: v.get("api", v.get("market", "")) for k, v in data.items()
                }
            return data
    except FileNotFoundError:
        # A missing file simply means there is nothing to compare against.
        pass
    # FIX: the exception spec was a nested tuple
    # `(json.JSONDecodeError, (KeyError, IndexError))`, which only works via
    # CPython's undocumented recursive tuple matching; flatten it.
    except (json.JSONDecodeError, KeyError, IndexError):
        logger.warning(f"Could not parse existing mapping file {file_path}")
    return {}
645 |
646 |
def compare_mappings(
    new_mapping: Dict[str, str], existing_mapping: Dict[str, str]
) -> List[str]:
    """Describe how *new_mapping* differs from *existing_mapping*.

    Produces one human-readable line per changed, added, or removed symbol.
    """
    differences: List[str] = []

    for symbol in set(new_mapping) | set(existing_mapping):
        new_id = new_mapping.get(symbol)
        old_id = existing_mapping.get(symbol)

        if new_id and old_id:
            if new_id != old_id:
                differences.append(
                    f"  {symbol}: Changed from '{old_id}' to '{new_id}'"
                )
        elif new_id:
            differences.append(f"  {symbol}: New entry -> '{new_id}'")
        elif old_id:
            differences.append(f"  {symbol}: Removed (was '{old_id}')")

    return differences
669 |
670 |
def main() -> int:
    """CLI entry point: generate, validate, filter, and write the mapping.

    Pipeline: parse args -> build mapping (with optional price validation) ->
    abort on known-mapping validation failure -> exclude low-confidence and
    price-mismatched entries -> write the filtered mapping as JSON and the
    excluded entries to a sidecar file for manual review.

    Returns:
        Process exit code: 0 on success, 1 on validation failure.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Build CoinGecko mapping file from Pyth Hermes API"
    )
    parser.add_argument(
        "-o",
        "--output",
        default="coingecko_mapping.json",
        help="Output file path (default: coingecko_mapping.json)",
    )
    parser.add_argument(
        "-e", "--existing", help="Path to existing mapping file to compare against"
    )
    parser.add_argument(
        "--no-validate-prices",
        action="store_true",
        help="Skip price validation (by default, prices are validated)",
    )
    parser.add_argument(
        "--max-price-deviation",
        type=float,
        default=1.0,
        help="Maximum price deviation percentage to warn about (default: 1.0%%)",
    )
    args = parser.parse_args()

    logger.info("Starting CoinGecko mapping generation...")

    existing_mapping = {}
    if args.existing:
        existing_mapping = load_existing_mapping(args.existing)
        if existing_mapping:
            logger.info(
                f"Loaded {len(existing_mapping)} existing mappings from {args.existing}"
            )

    (
        mapping,
        confidence_scores,
        warnings,
        price_mismatch_symbols,
        price_details,
    ) = build_mapping(
        validate_prices_flag=not args.no_validate_prices,
        max_deviation=args.max_price_deviation,
    )

    # Check if validation failed (build_mapping embeds this marker in warnings)
    validation_failed = any("VALIDATION FAILED" in w for w in warnings)
    if validation_failed:
        logger.error("\nExiting with error code due to validation failures.")
        return 1

    # Filter out symbols with low confidence (< 1.0) or price mismatches
    excluded_symbols = set()
    excluded_low_confidence = []
    excluded_price_mismatch = []

    # Find symbols with confidence < 1.0
    for symbol, score in confidence_scores.items():
        if score < 1.0:
            excluded_symbols.add(symbol)
            excluded_low_confidence.append(symbol)

    # Find symbols with price mismatches
    for symbol in price_mismatch_symbols:
        excluded_symbols.add(symbol)
        excluded_price_mismatch.append(symbol)

    # Create filtered mapping (only high confidence, no price mismatches)
    filtered_mapping = {
        symbol: coin_id
        for symbol, coin_id in mapping.items()
        if symbol not in excluded_symbols
    }

    # Log excluded entries for manual review
    if excluded_low_confidence or excluded_price_mismatch:
        logger.warning("\n" + "=" * 60)
        logger.warning("EXCLUDED ENTRIES (for manual review):")
        logger.warning("=" * 60)

        if excluded_low_confidence:
            logger.warning(
                f"\n⚠️ Low confidence matches (< 100%) - {len(excluded_low_confidence)} entries:"
            )
            for symbol in sorted(excluded_low_confidence):
                coin_id = mapping.get(symbol, "N/A")
                score = confidence_scores.get(symbol, 0.0)
                logger.warning(f"  {symbol}: {coin_id} (confidence: {score:.2%})")

        if excluded_price_mismatch:
            logger.warning(
                f"\n⚠️ Price mismatches - {len(excluded_price_mismatch)} entries:"
            )
            for symbol in sorted(excluded_price_mismatch):
                coin_id = mapping.get(symbol, "N/A")
                if symbol in price_details:
                    details = price_details[symbol]
                    hermes_price = details["hermes_price"]
                    cg_price = details["coingecko_price"]
                    deviation = details["deviation"]
                    logger.warning(
                        f"  {symbol} ({coin_id}): "
                        f"Hermes: ${hermes_price:,.5f} | "
                        f"CoinGecko: ${cg_price:,.5f} | "
                        f"Deviation: {deviation:.2f}%"
                    )
                else:
                    logger.warning(f"  {symbol}: {coin_id}")

        # Output excluded entries as JSON for easy manual addition
        excluded_mapping = {
            symbol: mapping[symbol] for symbol in excluded_symbols if symbol in mapping
        }
        if excluded_mapping:
            excluded_file = args.output.replace(".json", "_excluded.json")
            with open(excluded_file, "w") as f:
                json.dump(excluded_mapping, f, indent=2, sort_keys=True)
            logger.warning(
                f"\n📝 Excluded entries saved to {excluded_file} for manual review"
            )
        logger.warning("=" * 60 + "\n")

    # Output results
    logger.info(f"\n{'=' * 60}")
    logger.info(f"Generated mapping for {len(mapping)} symbols")
    if excluded_symbols:
        logger.info(
            f"Excluded {len(excluded_symbols)} entries (low confidence or price mismatch)"
        )
    logger.info(f"Final mapping contains {len(filtered_mapping)} symbols")
    logger.info(f"{'=' * 60}\n")

    # Compare with existing if provided
    if existing_mapping:
        differences = compare_mappings(filtered_mapping, existing_mapping)
        if differences:
            logger.info(f"Found {len(differences)} differences from existing mapping:")
            for diff in differences:
                logger.info(diff)
            logger.info("")

    # Print warnings (excluding excluded entries from warnings count)
    other_warnings = [
        w
        for w in warnings
        if not any(symbol in w for symbol in excluded_symbols)
        and "VALIDATION FAILED" not in w
    ]
    if other_warnings:
        logger.warning(f"Found {len(other_warnings)} other warnings:")
        for warning in other_warnings:
            logger.warning(warning)
        logger.info("")

    # Output JSON (only high confidence, no price mismatches)
    with open(args.output, "w") as f:
        json.dump(filtered_mapping, f, indent=2, sort_keys=True)

    logger.info(f"✓ Mapping saved to {args.output}")

    # Summary
    fuzzy_matches = len(excluded_low_confidence)
    no_matches = len([w for w in warnings if "No match found" in w])
    exact_matches = len(filtered_mapping)

    logger.info(f"\nSummary:")
    logger.info(f"  Total symbols processed: {len(mapping)}")
    logger.info(f"  Included in final mapping: {exact_matches} (exact matches only)")
    logger.info(f"  Excluded - low confidence: {fuzzy_matches}")
    logger.info(f"  Excluded - price mismatch: {len(excluded_price_mismatch)}")
    logger.info(f"  No matches found: {no_matches}")

    return 0
849 |
850 |
# Script entry point: propagate main()'s return value as the process exit code
# (0 = success, 1 = validation failure).
if __name__ == "__main__":
    sys.exit(main())
853 |
--------------------------------------------------------------------------------