├── .github
├── FUNDING.yml
└── workflows
│ └── main.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── dolphie
├── App.py
├── DataTypes.py
├── Dolphie.py
├── Dolphie.tcss
├── Modules
│ ├── ArgumentParser.py
│ ├── CommandManager.py
│ ├── Functions.py
│ ├── ManualException.py
│ ├── MetricManager.py
│ ├── MySQL.py
│ ├── PerformanceSchemaMetrics.py
│ ├── Queries.py
│ ├── ReplayManager.py
│ └── TabManager.py
├── Panels
│ ├── DDL.py
│ ├── Dashboard.py
│ ├── MetadataLocks.py
│ ├── PerformanceSchemaMetrics.py
│ ├── Processlist.py
│ ├── ProxySQLCommandStats.py
│ ├── ProxySQLDashboard.py
│ ├── ProxySQLHostgroupSummary.py
│ ├── ProxySQLProcesslist.py
│ ├── ProxySQLQueryRules.py
│ ├── Replication.py
│ └── StatementsSummaryMetrics.py
└── Widgets
│ ├── AutoComplete.py
│ ├── CommandModal.py
│ ├── CommandScreen.py
│ ├── EventLogScreen.py
│ ├── ProxySQLThreadScreen.py
│ ├── SpinnerWidget.py
│ ├── TabSetupModal.py
│ ├── ThreadScreen.py
│ └── TopBar.py
├── examples
├── dolphie-daemon.cnf
└── dolphie.service
├── poetry.lock
└── pyproject.toml
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [charles-001]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
12 | polar: # Replace with a single Polar username
13 | buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
14 | thanks_dev: # Replace with a single thanks.dev username
15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
16 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: main
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 |
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v3
16 |
17 | - name: Install poetry
18 | run: pipx install poetry
19 |
20 | - uses: actions/setup-python@v4
21 | with:
22 | python-version-file: pyproject.toml
23 | cache: poetry
24 |
25 | - run: poetry install
26 |
27 | - run: poetry build
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | dist/
3 |
4 | .idea
5 | .history
6 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim
2 |
3 | # Install gawk for parsing pyproject.toml
4 | RUN apt-get update && apt-get install -y --no-install-recommends gawk
5 |
6 | # Extract the version from pyproject.toml and store it in a variable
7 | ARG DOLPHIE_VERSION
8 | WORKDIR /app
9 | COPY pyproject.toml ./
10 |
11 | # Extract version from pyproject.toml
12 | RUN DOLPHIE_VERSION=$(gawk -F'"' '/^version =/ {print $2}' pyproject.toml) \
13 | && pip3 install --no-cache-dir dolphie==$DOLPHIE_VERSION
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Dolphie
2 |
3 |
4 | 
5 | Your single pane of glass for real-time analytics into MySQL/MariaDB & ProxySQL
6 |
7 |
8 |
9 |
10 |
11 | 
12 |
13 |
14 |
15 |
16 |
17 | ## Installation
18 |
19 | Requires Python 3.8.1+
20 |
21 | #### Using PyPi
22 |
23 | ```shell
24 | $ pip install dolphie
25 | ```
26 |
27 | #### Using Poetry
28 |
29 | ```shell
30 | $ curl -sSL https://install.python-poetry.org | python3 -
31 |
32 | $ poetry install
33 | ```
34 |
35 | #### Using Homebrew
36 |
37 | If you are a [Homebrew](https://brew.sh/) user, you can install [dolphie](https://formulae.brew.sh/formula/dolphie) via
38 |
39 | ```sh
40 | $ brew install dolphie
41 | ```
42 |
43 | #### Using Docker
44 |
45 | ```sh
46 | $ docker pull ghcr.io/charles-001/dolphie:latest
47 | $ docker run -dit --name dolphie ghcr.io/charles-001/dolphie:latest
48 | $ docker exec -it dolphie dolphie --tab-setup
49 | ```
50 |
51 | ## Usage
52 |
53 | ```
54 | positional arguments:
55 | uri Use a URI string for credentials (mysql/proxysql) - format: mysql://user:password@host:port (port is optional with default 3306, or 6032 for ProxySQL)
56 |
57 | options:
58 | --help show this help message and exit
59 | --tab-setup Start Dolphie by showing the Tab Setup modal instead of automatically connecting with the specified options
60 | -C , --cred-profile Credential profile to use. See below for more information
61 | -u , --user Username
62 | -p , --password Password
63 | -h , --host Hostname/IP address
64 | -P , --port Port (socket has precedence)
65 | -S , --socket Socket file
66 | -c , --config-file Dolphie's config file to use. Options are read from these files in the given order: ['/etc/dolphie.cnf', '/etc/dolphie/dolphie.cnf', '~/.dolphie.cnf']
67 | -m , --mycnf-file MySQL config file path to use. This should use [client] section [default: ~/.my.cnf]
68 | -l , --login-path Specify login path to use with mysql_config_editor's file ~/.mylogin.cnf for encrypted login credentials [default: client]
69 | -r , --refresh-interval
70 | The time, in seconds, between each data collection and processing cycle [default: 1]
71 | --host-cache-file Resolve IPs to hostnames when your DNS is unable to. Each IP/hostname pair should be on its own line using format ip=hostname [default: ~/dolphie_host_cache]
72 | --tab-setup-file Specify location of file that stores the available hosts to use in Tab Setup modal [default: ~/dolphie_hosts]
73 | --heartbeat-table (MySQL only) If your hosts use pt-heartbeat, specify table in format db.table to use the timestamp it has for replication lag instead of Seconds_Behind_Master from SHOW REPLICA STATUS
74 | --ssl-mode Desired security state of the connection to the host. Supports: REQUIRED/VERIFY_CA/VERIFY_IDENTITY [default: OFF]
75 | --ssl-ca Path to the file that contains a CA (certificate authority)
76 | --ssl-cert Path to the file that contains a certificate
77 | --ssl-key Path to the file that contains a private key for the certificate
78 | --panels What panels to display on startup separated by a comma. Supports: ['dashboard', 'processlist', 'graphs', 'replication', 'metadata_locks', 'ddl', 'pfs_metrics', 'statements_summary', 'proxysql_hostgroup_summary', 'proxysql_mysql_query_rules', 'proxysql_command_stats'], [default: ['dashboard', 'processlist']]
79 | --graph-marker What marker to use for graphs (available options: https://tinyurl.com/dolphie-markers) [default: braille]
80 | --pypi-repo What PyPi repository to use when checking for a new version default: [https://pypi.org/pypi/dolphie/json]
81 | -H , --hostgroup This is used for creating tabs and connecting to them for hosts you specify in Dolphie's config file under a hostgroup section. As an example, you'll have a section called [cluster1] then below it you will list each host on a new line in the format key=host (keys have no meaning). Hosts support optional port (default is whatever port parameter is) in the format host:port. You can also name the tabs by suffixing ~tab_name to the host (i.e. 1=host~tab_name)
82 | -R, --record Enables recording of Dolphie's data to a replay file. Note: This can use significant disk space. Monitor accordingly!
 83 | -D, --daemon Starts Dolphie in daemon mode. This will not show the TUI and is designed to be put into the background with whatever solution you decide to use. Automatically enables --record. This mode is solely used for recording data to a replay file
84 | --daemon-log-file Full path of the log file for daemon mode
85 | --daemon-panels Which panels to run queries for in daemon mode separated by a comma. This can control significant load if the queries are responsible. Dashboard/Replication panels cannot be turned off. Supports: ['processlist', 'metadata_locks', 'pfs_metrics', 'statements_summary', 'proxysql_hostgroup_summary'], [default: ['processlist', 'metadata_locks', 'pfs_metrics']]
86 | --replay-file Specify the full path of the replay file to load and enable replay mode
87 | --replay-dir Directory to store replay data files
88 | --replay-retention-hours
89 | Number of hours to keep replay data. Data will be purged every hour [default: 48]
90 | --exclude-notify-vars
91 | Dolphie will let you know when a global variable has been changed. If you have variables that change frequently and you don't want to see them, you can specify which ones with this option separated by a comma (i.e. --exclude-notify-vars=variable1,variable2)
92 | --show-trxs-only (MySQL only) Start with only showing threads that have an active transaction
93 | --additional-columns Start with additional columns in Processlist panel
94 | --debug-options Display options that are set and what they're set by (command-line, dolphie config, etc) then exit. WARNING: This will show passwords and other sensitive information in plain text
95 | -V, --version Display version and exit
96 |
97 | Order of precedence for methods that pass options to Dolphie:
98 | 1. Command-line
99 | 2. Credential profile (set by --cred-profile)
100 | 3. Environment variables
101 | 4. Dolphie's config (set by --config-file)
102 | 5. ~/.mylogin.cnf (mysql_config_editor)
103 | 6. ~/.my.cnf (set by --mycnf-file)
104 |
105 | Credential profiles can be defined in Dolphie's config file as a way to store credentials for easy access.
 106 | A profile can be created by adding a section in the config file with the format: [credential_profile_<name>]
107 | When using a credential profile, do not include the prefix 'credential_profile' (i.e. -C production)
108 | The following options are supported in credential profiles:
109 | user
110 | password
111 | socket
112 | ssl_mode REQUIRED/VERIFY_CA/VERIFY_IDENTITY
113 | ssl_ca
114 | ssl_cert
115 | ssl_key
116 | mycnf_file
117 | login_path
118 |
119 | MySQL my.cnf file supports these options under [client] section:
120 | host
121 | user
122 | password
123 | port
124 | socket
125 | ssl_mode REQUIRED/VERIFY_CA/VERIFY_IDENTITY
126 | ssl_ca
127 | ssl_cert
128 | ssl_key
129 |
130 | Login path file supports these options:
131 | host
132 | user
133 | password
134 | port
135 | socket
136 |
137 | Environment variables support these options:
138 | DOLPHIE_USER
139 | DOLPHIE_PASSWORD
140 | DOLPHIE_HOST
141 | DOLPHIE_PORT
142 | DOLPHIE_SOCKET
143 | DOLPHIE_SSL_MODE REQUIRED/VERIFY_CA/VERIFY_IDENTITY
144 | DOLPHIE_SSL_CA
145 | DOLPHIE_SSL_CERT
146 | DOLPHIE_SSL_KEY
147 |
148 | Dolphie's config supports these options under [dolphie] section:
149 | (bool) tab_setup
150 | (str) credential_profile
151 | (str) user
152 | (str) password
153 | (str) host
154 | (int) port
155 | (str) socket
156 | (str) ssl_mode
157 | (str) ssl_ca
158 | (str) ssl_cert
159 | (str) ssl_key
160 | (str) mycnf_file
161 | (str) login_path
162 | (str) host_cache_file
163 | (str) tab_setup_file
164 | (int) refresh_interval
165 | (str) heartbeat_table
166 | (comma-separated str) startup_panels
167 | (str) graph_marker
168 | (str) pypi_repository
169 | (str) hostgroup
170 | (bool) show_trxs_only
171 | (bool) show_additional_query_columns
172 | (bool) record_for_replay
173 | (bool) daemon_mode
174 | (comma-separated str) daemon_mode_panels
175 | (str) daemon_mode_log_file
176 | (str) replay_file
177 | (str) replay_dir
178 | (int) replay_retention_hours
179 | (comma-separated str) exclude_notify_global_vars
180 | ```
181 |
182 | ## Supported MySQL versions
183 |
184 | - MySQL/Percona Server 5.6/5.7/8.x/9.x
185 | - AWS RDS/Aurora
186 | - Azure MySQL
187 |
188 | ## Supported MariaDB versions
189 |
190 | - MariaDB 5.5/10.0/11.0+
191 | - AWS RDS
192 | - Azure MariaDB
193 |
194 | ## Supported ProxySQL versions
195 |
196 | - ProxySQL 2.6+ (could work on previous versions but not tested)
197 |
198 | Note: Use `admin` user instead of `stats` user so you can use all features
199 |
200 | ## MySQL Grants required
201 |
202 | #### Least privilege
203 |
204 | 1. PROCESS (only if you switch to using processlist via `P` command)
205 | 2. SELECT to `performance_schema` + `pt-heartbeat table` (if used)
206 | 3. REPLICATION CLIENT/REPLICATION SLAVE
207 |
208 | #### Recommended
209 |
210 | 1. PROCESS (only if you switch to using processlist via `P` command)
211 | 2. Global SELECT access (good for explaining queries, listing all databases, etc)
212 | 3. REPLICATION CLIENT/REPLICATION SLAVE
213 | 4. SUPER (required if you want to kill queries)
214 |
215 | ## Record & Replay
216 |
217 | Dolphie is capable of recording your live session data that can be used in a future replay if needed. To begin recording, specify the `--record` option along with `--replay-dir` and you're good to go! The data will be saved in a SQLite database compressed with ZSTD for efficient storage management.
218 |
219 | To view a replay from either a live session or daemon mode, specify the `--replay-file` option or bring up the `Tab Setup` modal. Replays enable you to navigate through the recorded data as if you were observing Dolphie in real-time at the exact time you need to investigate. The replay interface features intuitive controls for stepping backward, moving forward, playing/pausing, and jumping to specific timestamps. While some commands or features may be restricted in replay mode, all core functionalities for effective review and troubleshooting remain accessible.
220 |
221 | ## Daemon Mode
222 |
223 | If you need Dolphie running incognito while always recording data to capture those critical moments when a database stall causes an incident or a tricky performance issue slips past other monitoring tools, then look no further! Daemon mode is the solution. Purpose-built for nonstop recording, it ensures you never miss the insights that matter most.
224 |
225 | To activate Daemon mode, specify the `--daemon` option, which will automatically enable `--record`. This will transform Dolphie into a resource-efficient, passive, always-on monitoring process that continuously records data. It removes Textual's TUI and creates a log file for messages while also printing them to the console.
226 |
227 | To run Dolphie in the background using daemon mode, I recommend `systemctl` for its flexibility and management capabilities. To see how to set that up, refer to the [service configuration example](https://github.com/charles-001/dolphie/blob/main/examples/dolphie.service). While alternatives like `nohup` or `tmux` can be used, they are not advisable due to their limited management features. Additionally, check out the [config example](https://github.com/charles-001/dolphie/blob/main/examples/dolphie-daemon.cnf) as a helpful starting point for setting up this mode.
228 |
229 | In Daemon mode, metrics are retained for the last 10 minutes to support graphing, with performance schema metric deltas automatically reset at 10-minute intervals. This approach keeps data fresh and relevant, providing an accurate view of recent activity.
230 |
231 | **Note**: Daemon mode's replay file can consume significant disk space, particularly on busy servers. To minimize disk usage, adjust the `--replay-retention-hours` and `--refresh-interval` options to control data retention and collection frequency.
232 |
233 | Example log messages in daemon mode:
234 |
235 | ```
236 | [INFO] Starting Dolphie in daemon mode with a refresh interval of 1s
237 | [INFO] Log file: /var/log/dolphie/dolphie.log
238 | [INFO] Connected to MySQL with Process ID 324
239 | [INFO] Replay SQLite file: /var/lib/dolphie/replays/localhost/daemon.db (24 hours retention)
240 | [INFO] Connected to SQLite
241 | [INFO] Replay database metadata - Host: localhost, Port: 3306, Source: MySQL (Percona Server), Dolphie: 6.3.0
242 | [INFO] ZSTD compression dictionary trained with 10 samples (size: 52.56KB)
243 | [WARNING] Read-only mode changed: R/W -> RO
244 | [INFO] Global variable innodb_io_capacity changed: 1000 -> 2000
245 | ```
246 |
247 | ## System Utilization in the Dashboard Panel
248 |
249 | The System Utilization section in the Dashboard panel will only display when Dolphie is running on the same host as the server you're connected to. It displays the following information:
250 | - Uptime
251 | - CPU Usage (Percentage, # of cores)
252 | - Load Averages (1, 5, and 15 minutes)
253 | - Memory Usage (Percentage, Used/Total)
254 | - Swap Usage (Used/Total)
255 | - Network Traffic (Down, Up)
256 |
257 | Example:
258 |
259 |
260 |
261 |
262 | ## Credential Profiles
263 |
 264 | Credential profiles can be defined in Dolphie's config file as a way to store credentials for easy access. A profile can be created by adding a section in the config file with the format: `[credential_profile_<name>]`
265 |
266 | The following options are supported in credential profiles:
267 |
268 | - user
269 | - password
270 | - socket
271 | - ssl_mode REQUIRED/VERIFY_CA/VERIFY_IDENTITY
272 | - ssl_ca
273 | - ssl_cert
274 | - ssl_key
275 | - mycnf_file
276 | - login_path
277 |
278 | Example:
279 |
280 | ```ini
281 | [credential_profile_dev]
282 | user = dev_user
283 | password = dev_password
284 |
285 | [credential_profile_prod]
286 | mycnf_file = /secure/path/to/prod.cnf
287 | ```
288 |
289 | To use a credential profile, you can specify it with `-C`/`--cred-profile` option without using the prefix `credential_profile` (i.e. `-C prod`) when starting Dolphie. Hostgroups can also use credential profiles (see below)
290 |
291 | ## Hostgroups
292 |
 293 | Hostgroups are a way to easily connect to multiple hosts at once. To set this up, you will create a section in Dolphie's config file with the name you want the hostgroup to be and list each host on a new line in the format `key=host` (keys have no meaning). Hosts support optional port (default is whatever `port` parameter is) in the format `host:port`. Once ready, you will use the parameter `hostgroup` or `Tab Setup` modal to see it in action!
294 |
295 | Note: Colors can be used in the tab name by using the format `[color]text[/color]` (i.e. `[red]production[/red]`). You can also use emojis supported by Rich (can see them by running `python -m rich.emoji`) by using the format `:emoji:` (i.e. `:ghost:`). Rich supports the normal emoji shortcodes.
296 |
297 | Example:
298 |
299 | ```ini
300 | [cluster1]
301 | 1={"host": "host1", "tab_title": "[yellow]host1[/yellow] :ghost:", "credential_profile": "dev"}
302 | 2={"host": "host2", "tab_title": "[blue]host2[/blue] :ghost:", "credential_profile": "dev"}
303 | 3={"host": "host3:3307", "tab_title": "[red]production[/red]", "credential_profile": "prod"}
304 | 4={"host": "host4"}
305 | ```
306 |
307 | ## Feedback
308 |
309 | I welcome all questions, bug reports, and requests. If you enjoy Dolphie, please let me know! I'd love to hear from you :dolphin:
310 |
--------------------------------------------------------------------------------
/dolphie/DataTypes.py:
--------------------------------------------------------------------------------
import hashlib
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Union

import pymysql
from rich.table import Table

from dolphie.Modules.Functions import format_query, format_time
9 |
10 |
@dataclass
class ConnectionSource:
    """String constants naming the kind of server a connection targets.

    Used as plain string labels (e.g. stored on Replica.connection_source_alt);
    attributes are intentionally unannotated so the dataclass generates no fields.
    """

    mysql = "MySQL"
    proxysql = "ProxySQL"
    mariadb = "MariaDB"
16 |
17 |
@dataclass
class ConnectionStatus:
    """String constants describing a connection's lifecycle/role state.

    Attributes are intentionally unannotated so the dataclass generates no
    fields; values are displayed directly in the UI (e.g. "R/W" -> "RO"
    transitions are logged in daemon mode).
    """

    connecting = "CONNECTING"
    connected = "CONNECTED"
    disconnected = "DISCONNECTED"
    read_write = "R/W"
    read_only = "RO"
25 |
26 |
@dataclass
class Replica:
    """State for a single replica discovered from the connected host.

    Only row_key, thread_id, and host are required at creation time (see
    ReplicaManager.add_replica); the remaining fields start as None and are
    populated once a connection to the replica is established.
    """

    row_key: str  # Unique key for this replica; see ReplicaManager.create_replica_row_key
    thread_id: int
    host: str
    # Fixed implicit-Optional annotations (PEP 484): a default of None requires
    # Optional[...] — previously these were annotated as bare int/str/etc.
    port: Optional[int] = None
    host_distro: Optional[str] = None
    connection: Optional[pymysql.Connection] = None
    connection_source_alt: Optional[ConnectionSource] = None
    table: Optional[Table] = None
    replication_status: Dict[str, Union[str, int]] = field(default_factory=dict)
    mysql_version: Optional[str] = None
39 |
40 |
class ReplicaManager:
    """Tracks the replicas of the connected host and their open connections."""

    def __init__(self):
        self.available_replicas: List[Dict[str, str]] = []
        self.replicas: Dict[str, Replica] = {}
        self.ports: Dict[str, Dict[str, Union[str, bool]]] = {}

    def create_replica_row_key(self, host: str, port: int) -> str:
        # MariaDB provides no way to map a replica in the processlist to a
        # specific port, so instead of keying on thread_id we derive a unique
        # row key for the replica sections from host:port.
        return hashlib.sha256(f"{host}:{port}".encode()).hexdigest()

    def add_replica(self, row_key: str, thread_id: int, host: str, port: int) -> Replica:
        replica = Replica(row_key=row_key, thread_id=thread_id, host=host, port=port)
        self.replicas[row_key] = replica

        return replica

    def remove_replica(self, row_key: str):
        # Raises KeyError if the key is unknown, matching dict deletion semantics
        del self.replicas[row_key]

    def get_replica(self, row_key: str) -> Replica:
        # Returns None when the row key isn't tracked
        return self.replicas.get(row_key)

    def remove_all_replicas(self):
        if not self.replicas:
            return

        # Close any live connections before dropping the registry
        for replica in self.replicas.values():
            if replica.connection:
                replica.connection.close()

        self.replicas = {}

    def get_sorted_replicas(self) -> List[Replica]:
        return sorted(self.replicas.values(), key=lambda replica: replica.host)
75 |
76 |
@dataclass
class Panel:
    """Metadata describing a single Dolphie UI panel.

    Attributes:
        name: Internal identifier used on the command line and in configs.
        display_name: Human-friendly title shown in the UI.
        key: Superscript hotkey hint rendered next to the panel title
            (None when the panel has no key assigned yet).
        visible: Whether the panel is currently displayed.
        daemon_supported: Whether daemon mode collects data for this panel.
    """

    name: str
    display_name: str
    # Fixed implicit-Optional annotation (PEP 484): default None requires Optional[str]
    key: Optional[str] = None
    visible: bool = False
    daemon_supported: bool = True
84 |
85 |
class Panels:
    """Registry of every panel Dolphie supports, stored as instance attributes."""

    def __init__(self):
        self.dashboard = Panel("dashboard", "Dashboard", "¹", daemon_supported=False)
        self.processlist = Panel("processlist", "Processlist", "²")
        self.graphs = Panel("graphs", "Metric Graphs", "³", daemon_supported=False)
        self.replication = Panel("replication", "Replication", "⁴", daemon_supported=False)
        self.metadata_locks = Panel("metadata_locks", "Metadata Locks", "⁵")
        self.ddl = Panel("ddl", "DDL", "⁶", daemon_supported=False)
        self.pfs_metrics = Panel("pfs_metrics", "Performance Schema Metrics", "⁷")
        self.statements_summary = Panel("statements_summary", "Statements Summary", "⁸")
        self.proxysql_hostgroup_summary = Panel("proxysql_hostgroup_summary", "Hostgroup Summary", "⁴")
        self.proxysql_mysql_query_rules = Panel(
            "proxysql_mysql_query_rules", "Query Rules", "⁵", daemon_supported=False
        )
        self.proxysql_command_stats = Panel("proxysql_command_stats", "Command Stats", "⁶", daemon_supported=False)

    def validate_panels(self, panel_list_str: Union[str, List[str]], valid_panel_names: List[str]) -> List[str]:
        """Split a comma-separated panel string (or pass a list through) and
        raise ValueError if any panel name isn't recognized."""
        if isinstance(panel_list_str, str):
            panels = panel_list_str.split(",")
        else:
            panels = panel_list_str

        invalid_panels = [panel for panel in panels if panel not in valid_panel_names]
        if invalid_panels:
            raise ValueError(
                f"Panel(s) [red2]{', '.join(invalid_panels)}[/red2] are not valid (see --help for more information)"
            )

        return panels

    def get_panel(self, panel_name: str) -> Panel:
        # Look up only instance attributes; returns None for unknown names
        return vars(self).get(panel_name, None)

    def get_all_daemon_panel_names(self) -> List[str]:
        return [panel.name for panel in self.get_all_panels() if panel.daemon_supported]

    def get_all_panels(self) -> List[Panel]:
        return [value for value in vars(self).values() if isinstance(value, Panel)]

    def get_key(self, panel_name: str) -> str:
        # This uses Rich's syntax for highlighting, not Textual's Content system
        key = self.get_panel(panel_name).key
        return f"[b highlight]{key}[/b highlight]"

    def get_panel_title(self, panel_name: str) -> str:
        panel = self.get_panel(panel_name)
        return f"[$b_highlight]{panel.key}[/$b_highlight]{panel.display_name}"

    def all(self) -> List[str]:
        return [
            value.name
            for attr_name, value in vars(self).items()
            if not attr_name.startswith("__") and isinstance(value, Panel)
        ]
136 |
137 |
class ProcesslistThread:
    """One row of the MySQL/MariaDB processlist with Rich display formatting applied."""

    def __init__(self, thread_data: Dict[str, str]):
        self.thread_data = thread_data

        self.id = str(thread_data.get("id", ""))
        self.mysql_thread_id = thread_data.get("mysql_thread_id")
        self.user = thread_data.get("user", "")
        self.host = thread_data.get("host", "")
        self.db = thread_data.get("db", "")
        self.time = int(thread_data.get("time", 0))
        self.protocol = self._get_formatted_string(thread_data.get("connection_type", ""))
        # formatted_query must be set before formatted_time: the color logic reads it
        self.formatted_query = self._get_formatted_query(thread_data.get("query", ""))
        self.formatted_time = self._get_formatted_time()
        self.command = self._get_formatted_command(thread_data.get("command", ""))
        self.state = self._get_formatted_string(thread_data.get("state", ""))
        self.trx_state = self._get_formatted_string(thread_data.get("trx_state", ""))
        self.trx_operation_state = self._get_formatted_string(thread_data.get("trx_operation_state", ""))
        self.trx_rows_locked = self._get_formatted_number(thread_data.get("trx_rows_locked", 0))
        self.trx_rows_modified = self._get_formatted_number(thread_data.get("trx_rows_modified", 0))
        self.trx_concurrency_tickets = self._get_formatted_number(thread_data.get("trx_concurrency_tickets", 0))
        self.trx_time = self._get_formatted_trx_time(thread_data.get("trx_time", ""))

    def _get_formatted_time(self) -> str:
        # Wrap the humanized time in a color tag when a color applies
        color = self._get_time_color()
        formatted = format_time(self.time)
        if color:
            return f"[{color}]{formatted}[/{color}]"
        return formatted

    def _get_time_color(self) -> str:
        query_code = self.formatted_query.code

        # Don't color Group Replication threads
        if "Group replication" in query_code:
            return ""

        # mysqldump-style backup query gets its own color
        if "SELECT /*!40001 SQL_NO_CACHE */ *" in query_code:
            return "purple"

        if not query_code:
            return ""

        # Escalate color with query runtime
        if self.time >= 10:
            return "red"
        if self.time >= 5:
            return "yellow"
        return "green"

    def _get_formatted_command(self, command: str):
        if command == "Killed":
            return "[red]Killed[/red]"
        return command

    def _get_formatted_trx_time(self, trx_time: str):
        if not trx_time:
            return "[dark_gray]N/A"
        return format_time(int(trx_time))

    def _get_formatted_query(self, query: str):
        return format_query(query)

    def _get_formatted_string(self, string: str):
        # Empty values render as a dimmed N/A placeholder
        return string if string else "[dark_gray]N/A"

    def _get_formatted_number(self, number):
        # Zero (numeric or the string "0") renders as a dimmed 0
        return "[dark_gray]0" if not number or number == "0" else number
198 |
199 |
class ProxySQLProcesslistThread:
    """One row of ProxySQL's processlist with Rich display formatting applied."""

    def __init__(self, thread_data: Dict[str, str]):
        self.thread_data = thread_data

        self.id = str(thread_data.get("id", ""))
        self.hostgroup = int(thread_data.get("hostgroup"))
        self.user = thread_data.get("user", "")
        self.frontend_host = self._get_formatted_string(thread_data.get("frontend_host", ""))
        self.host = self._get_formatted_string(thread_data.get("backend_host", ""))
        self.db = thread_data.get("db", "")
        # Convert to seconds since ProxySQL returns milliseconds
        self.time = int(thread_data.get("time", 0)) / 1000
        # formatted_query must be set before formatted_time: the color logic reads it
        self.formatted_query = self._get_formatted_query(thread_data.get("query", "").strip(" \t\n\r"))
        self.formatted_time = self._get_formatted_time()
        self.command = self._get_formatted_command(thread_data.get("command", ""))
        self.extended_info = thread_data.get("extended_info", "")

    def _get_formatted_time(self) -> str:
        # Wrap the humanized time in a color tag when a color applies
        color = self._get_time_color()
        formatted = format_time(self.time)
        return f"[{color}]{formatted}[/{color}]" if color else formatted

    def _get_time_color(self) -> str:
        # Threads with no query text stay uncolored
        if not self.formatted_query.code:
            return ""

        # Escalate color with query runtime
        if self.time >= 10:
            return "red"
        if self.time >= 5:
            return "yellow"
        return "green"

    def _get_formatted_command(self, command: str):
        if command == "Killed":
            return "[red]Killed[/red]"
        return command

    def _get_formatted_trx_time(self, trx_time: str):
        if not trx_time:
            return "[dark_gray]N/A"
        return format_time(int(trx_time))

    def _get_formatted_query(self, query: str):
        return format_query(query)

    def _get_formatted_string(self, string: str):
        # Empty values render as a dimmed N/A placeholder
        return string if string else "[dark_gray]N/A"

    def _get_formatted_number(self, number):
        # Zero (numeric or the string "0") renders as a dimmed 0
        return "[dark_gray]0" if not number or number == "0" else number
251 |
252 |
class HotkeyCommands:
    """String constants identifying which modal/action a hotkey command triggers."""

    show_thread = "show_thread"
    thread_filter = "thread_filter"
    thread_kill_by_parameter = "thread_kill_by_parameter"
    variable_search = "variable_search"
    rename_tab = "rename_tab"
    refresh_interval = "refresh_interval"
    replay_seek = "replay_seek"
    maximize_panel = "maximize_panel"
262 |
--------------------------------------------------------------------------------
/dolphie/Dolphie.tcss:
--------------------------------------------------------------------------------
1 | * {
2 | scrollbar-background: #161e31;
3 | scrollbar-background-hover: #161e31;
4 | scrollbar-background-active: #161e31;
5 |
6 | scrollbar-color: #33405d;
7 | scrollbar-color-hover: #404f71;
8 | scrollbar-color-active: #4f608a;
9 |
10 | color: #e9e9e9;
11 | }
12 |
13 | #_default, CommandScreen, EventLog {
14 | background: #0a0e1b;
15 | }
16 | Graph {
17 | height: 100%;
18 | }
19 | Horizontal {
20 | height: auto;
21 | }
22 | Rule {
23 | color: #32416a;
24 | margin: 0;
25 | }
26 | DataTable {
27 | background: #0f1525;
28 | overflow-x: hidden;
29 | max-height: 40;
30 |
31 | &:focus {
32 | background-tint: #0f1525;
33 | }
34 |
35 | & > .datatable--odd-row {
36 | background: #131a2c;
37 | }
38 | & > .datatable--even-row {
39 | background: #0f1525;
40 | }
41 | & > .datatable--header {
42 | background: transparent;
43 | color: #c5c7d2;
44 | }
45 | }
46 |
47 | LoadingIndicator {
48 | color: #8fb0ee;
49 | height: auto;
50 | content-align: center middle;
51 | margin-top: 3;
52 | }
53 | ModalScreen {
54 | background: #0d1015 70%;
55 | align: center middle;
56 | height: 40;
57 | color: #4d5a85;
58 | }
59 | Sparkline {
60 | margin-top: 1;
61 | margin-bottom: 1;
62 | }
63 | SpinnerWidget {
64 | color: #8fb0ee;
65 | height: auto;
66 | content-align: center middle;
67 | }
68 | CommandList {
69 | text-style: none;
70 | border-bottom: hkey #1c2440;
71 | max-height: 80vh;
72 |
73 | & > .option-list--option {
74 | text-style: none;
75 | padding: 0;
76 | }
77 | & > .option-list--option-highlighted {
78 | color: #b7c7ee;
79 | background: #171e2f;
80 | text-style: bold;
81 | }
82 | & > .option-list--option-hover {
83 | color: #b7c7ee;
84 | background: #171e2f;
85 | text-style: bold;
86 | }
87 | }
88 | CommandPalette {
89 | background: #0d1015 70%;
90 |
91 | & > Vertical {
92 | background: #0a0e1b;
93 | }
94 | & #--input {
95 | border-top: hkey #1c2440;
96 | }
97 | }
98 | CommandInput, CommandInput:focus {
99 | background: #0a0e1b !important;
100 | width: 1fr !important;
101 | border: blank !important;
102 | padding-left: 0 !important;
103 | margin: 0 1 !important;
104 | }
105 | TopBar {
106 | dock: top;
107 | background: #192036;
108 | height: 1;
109 | layout: horizontal;
110 | }
111 | #topbar_title {
112 | width: 15%;
113 | content-align: left middle;
114 | }
115 | #topbar_host {
116 | color: #bbc8e8;
117 | width: 70%;
118 | content-align: center middle;
119 | text-style: bold;
120 | }
121 | #topbar_help {
122 | width: 15%;
123 | content-align: right middle;
124 | color: #8f9fc1;
125 | }
126 |
127 | #metric_graphs_title, #replication_title, #replicas_title, #group_replication_title, #clusterset_title {
128 | color: #bbc8e8;
129 | text-style: bold;
130 | width: 100%;
131 | content-align: center middle;
132 | }
133 |
134 | #statements_summary_radio_set {
135 | margin-bottom: 1;
136 | }
137 | .dashboard_replay {
138 | height: auto;
139 | width: 65;
140 | content-align: center middle;
141 | align: center middle;
142 | }
143 | #dashboard_replay_progressbar {
144 | width: 100%;
145 |
146 | & > #bar {
147 | width: 65;
148 | }
149 |
150 | & > #bar > .bar--bar {
151 | color: #91abec;
152 | background: #3a3f51;
153 | }
154 | & > #bar > .bar--complete {
155 | color: #54efae;
156 | }
157 | }
158 |
159 | .replay_buttons {
160 | height: auto;
161 | width: 65;
162 | background: #20283b;
163 | border-top: tall #192033;
164 | border-bottom: tall #192033;
165 | }
166 | .replay_button {
167 | text-style: none;
168 | background: #20283b;
169 | background-tint: transparent;
170 | border: none;
171 |
172 | &:hover {
173 | background: #20283b;
174 | color: #91abec;
175 | border: none;
176 | text-style: bold;
177 | }
178 | &:focus {
179 | background: #20283b;
180 | color: #91abec;
181 | border: none;
182 | text-style: bold;
183 | }
184 | &.-active {
185 | background: #20283b;
186 | color: #5475c7;
187 | border: none;
188 | text-style: bold;
189 | tint: #273148 0%;
190 | }
191 | }
192 |
193 | #panel_graphs, #panel_replication {
194 | height: auto;
195 |
196 | & Tabs {
197 | margin-top: 0;
198 | }
199 | }
200 | #panel_graphs {
201 | & .metric_graph_container, .metric_graph_container2 {
202 | height: 19;
203 | }
204 | & .metric_graph_stats {
205 | width: 100%;
206 | content-align: center middle;
207 | }
208 | }
209 | Screen.-maximized-view {
210 | height: 100%;
211 | & .metric_graph_container, .metric_graph_container2, TabPane {
212 | height: 1fr !important;
213 | }
214 | & Tabs {
215 | margin-top: 0;
216 | }
217 | & DataTable {
218 | height: 100% !important;
219 | max-height: 100% !important;
220 | }
221 | }
222 |
223 | #pfs_metrics_file_io_datatable, #pfs_metrics_table_io_waits_datatable, #proxysql_hostgroup_summary_datatable, #proxysql_mysql_query_rules_datatable {
224 | overflow-x: auto;
225 | }
226 |
227 | .panel_container {
228 | height: auto;
229 | width: 100%;
230 | background: #0f1525;
231 | border: tall #1b233a;
232 | margin-bottom: 1;
233 |
234 | & > Label {
235 | width: 100%;
236 | color: #bbc8e8;
237 | text-style: bold;
238 | content-align: center middle;
239 | }
240 |
241 | & #statements_summary_info {
242 | content-align: left middle;
243 | margin-left: 1;
244 | }
245 | }
246 |
247 | .dashboard {
248 | height: auto;
249 |
250 | & > Center {
251 | height: auto;
252 | layout: horizontal;
253 | content-align: center middle;
254 |
255 | & > * {
256 | content-align: center top;
257 | height: 11;
258 | width: auto;
259 | padding-left: 1;
260 | padding-right: 1;
261 | margin-bottom: 0;
262 | }
263 | }
264 | }
265 |
266 | .replication {
267 | height: auto;
268 | margin-bottom: 1;
269 |
270 | & > Label {
271 | width: 100%;
272 | content-align: center middle;
273 | }
274 |
275 | & > Center {
276 | height: auto;
277 | layout: horizontal;
278 | content-align: center middle;
279 |
280 | & > ScrollableContainer {
281 | content-align: center top;
282 | background: #0f1525;
283 | border: tall #1b233a;
284 | height: auto;
285 | max-height: 18;
286 | }
287 |
288 | & > .replication_status {
289 | max-width: 85;
290 | }
291 | & > .replication_thread_applier {
292 | max-width: 105;
293 | }
294 | }
295 | }
296 |
297 | .replicas, .group_replication {
298 | height: auto;
299 | margin-bottom: 1;
300 |
301 | & > Container {
302 | height: auto;
303 | layout: grid;
304 | grid-size: 2;
305 | grid-gutter: 1 0;
306 | }
307 | & > Container > ScrollableContainer {
308 | background: #0f1525;
309 | border: tall #1b233a;
310 | height: auto;
311 | max-height: 20;
312 | }
313 | & > Label {
314 | width: 100%;
315 | content-align: center middle;
316 | }
317 | & > LoadingIndicator {
318 | margin-top: 0;
319 | }
320 | }
321 | .group_replication > Container {
322 | grid-size: 3;
323 | }
324 |
325 |
326 | TabPane {
327 | padding: 0;
328 | }
329 | Tabs {
330 | margin-top: 1;
331 |
332 | &:focus {
333 | & .-active {
334 | background: #0a0e1b;
335 | }
336 | & .underline--bar {
337 | color: #caddfe;
338 | }
339 | }
340 |
341 | }
342 | Tab {
343 | &:focus {
344 | text-style: bold;
345 | }
346 | &:hover {
347 | text-style: bold;
348 | }
349 | &.-active {
350 | text-style: bold;
351 | }
352 | &.-active:focus {
353 | text-style: bold;
354 | }
355 | }
356 |
357 | Underline > .underline--bar {
358 | background: #283357;
359 | color: #82a5e1;
360 | }
361 |
362 | AutoComplete {
363 | AutoCompleteList {
364 | background: #151926;
365 | border-left: wide #384673;
366 |
367 | & > .option-list--option-hover {
368 | background: #283048;
369 | color: #b7c7ee;
370 | text-style: bold;
371 | }
372 | }
373 |
374 | & .autocomplete--highlight-match {
375 | color: #c7d734;
376 | background: #384673;
377 | text-style: bold;
378 | }
379 |
380 | & .option-list--option-highlighted {
381 | background: #283048;
382 | color: #b7c7ee;
383 | text-style: bold;
384 | }
385 | }
386 |
387 | Input {
388 | width: 40;
389 | margin: 0 2;
390 | margin-bottom: 1;
391 | background: #111322;
392 | border: tall #252e49;
393 |
394 | &:focus {
395 | background-tint: transparent;
396 | background: #151729;
397 | border: tall #43548b;
398 | }
399 |
400 | & > .input--placeholder {
401 | color: #5e6b87;
402 | }
403 | }
404 |
405 | .button_container {
406 | height: auto;
407 | width: 100%;
408 | align-horizontal: center;
409 | }
410 | .switch_container {
411 | align: center top;
412 | margin: 1 0 1 0;
413 | height: auto;
414 |
415 | & > .switch_container > Label {
416 | color: #bbc8e8;
417 | padding-left: 1;
418 | text-style: bold;
419 | }
420 | }
421 |
422 | Sparkline {
423 | & > .sparkline--max-color {
424 | color: #869fd9;
425 | }
426 | & > .sparkline--min-color {
427 | color: #384c7a;
428 | }
429 | }
430 |
431 | Button {
432 | background: #282c42;
433 | border-top: tall #54597b;
434 | border-bottom: tall #171922;
435 |
436 | &:hover {
437 | background: #383e5c;
438 | border-top: tall #69709a;
439 | border-bottom: tall #171922;
440 | }
441 | &:focus {
442 | background: #383e5c;
443 | border-top: tall #69709a;
444 | border-bottom: tall #171922;
445 | text-style: bold;
446 | }
447 |
448 | &.-primary {
449 | background: #192c5b;
450 | border-top: tall #425894;
451 | border-bottom: tall #151b2b;
452 | }
453 | &.-primary:hover {
454 | background: #203875;
455 | border-top: tall #4a62a4;
456 | border-bottom: tall #151b2b;
457 | }
458 | &.-primary:focus {
459 | background: #203875;
460 | border-top: tall #4a62a4;
461 | border-bottom: tall #151b2b;
462 | text-style: bold;
463 | }
464 | }
465 |
466 | Switch {
467 | background: #0a0e1b;
468 | border: none #364774;
469 |
470 | &:focus {
471 | background-tint: transparent;
472 | }
473 |
474 | & > .switch--slider {
475 | color: #424d6f;
476 | background: #192132;
477 | }
478 |
479 | &:focus > .switch--slider {
480 | color: #fd8383;
481 | }
482 | &:hover > .switch--slider {
483 | color: #fd8383;
484 | }
485 |
486 | &.-on > .switch--slider {
487 | color: #5c81d7;
488 | }
489 | &.-on:focus > .switch--slider {
490 | color: #54efae;
491 | }
492 | &.-on:hover > .switch--slider {
493 | color: #54efae;
494 | }
495 | }
496 |
497 | Checkbox {
498 | background: #131626;
499 | border: none;
500 | padding-left: 2;
501 | padding-bottom: 1;
502 | content-align: left middle;
503 |
504 | & .toggle--button {
505 | color: #0f1525;
506 | text-style: bold;
507 | background: #343d56;
508 | }
509 | }
510 |
511 | RadioSet {
512 | background: transparent;
513 | border: none;
514 | layout: horizontal;
515 |
516 | &:focus {
517 | background-tint: transparent;
518 |
519 | & > RadioButton.-selected > .toggle--label {
520 | text-style: bold;
521 | color: #a5b8e7;
522 | }
523 |
524 | & > .toggle--label {
525 | text-style: bold;
526 | }
527 | }
528 | }
529 |
530 | RadioButton {
531 | background: transparent;
532 | margin-right: 2;
533 | width: auto;
534 |
535 | & > .toggle--button {
536 | color: #0f1525;
537 | background: #343d56;
538 | }
539 | }
540 |
541 | ToggleButton {
542 | &:focus {
543 | background-tint: transparent;
544 | & > .toggle--label {
545 | text-style: bold;
546 | color: #a5b8e7;
547 | background: transparent;
548 | }
549 | }
550 | &:hover {
551 | & > .toggle--label {
552 | text-style: bold;
553 | color: #a5b8e7;
554 | background: transparent;
555 | }
556 | }
557 | &.-on {
558 | background: transparent;
559 | & > .toggle--button {
560 | text-style: bold;
561 | color: #74e3b5;
562 | background: #305240;
563 | }
564 | }
565 | }
566 |
567 | Toast {
568 | width: auto;
569 | max-width: 50%;
570 | background: #20283e;
571 |
572 | &.-information {
573 | border-left: wide #788bc9;
574 | border-right: wide #192036;
575 | }
576 | &.-warning {
577 | border-left: wide #f0e357;
578 | border-right: wide #192036;
579 | }
580 | &.-success {
581 | border-left: wide #5bd088;
582 | border-right: wide #192036;
583 | }
584 | &.-success .toast--title {
585 | color: #6ae398;
586 | }
587 | &.-error {
588 | border-left: wide #f05757;
589 | border-right: wide #192036;
590 | }
591 | &.-error .toast--title {
592 | color: #ed6363;
593 | }
594 | }
595 |
596 | Select {
597 | margin: 0 2;
598 | margin-bottom: 1;
599 | width: 100%;
600 |
601 | & > SelectOverlay {
602 | background: #111322;
603 |
604 | &:focus {
605 | background-tint: transparent;
606 | }
607 | }
608 |
609 | &:focus > SelectCurrent {
610 | border: tall #43548b;
611 | background-tint: transparent;
612 | background: #151729;
613 | }
614 | }
615 |
616 | SelectCurrent {
617 | background: #111322;
618 | border: tall #252e49;
619 |
620 | & Static#label {
621 | color: #606e88;
622 | }
623 |
624 | &.-has-value Static#label {
625 | color: #e9e9e9;
626 | }
627 | }
628 |
629 | Select > OptionList {
630 | background: #111322;
631 | border: tall #252e49;
632 | width: 100%;
633 | height: 15;
634 | margin: 0 1 0 1;
635 |
636 | &:focus {
637 | margin: 0;
638 | height: auto;
639 | max-height: 15;
640 | border: tall #3c476b;
641 | }
642 |
643 | & > .option-list--option-highlighted {
644 | text-style: bold;
645 | background: #131626;
646 | }
647 | &:focus > .option-list--option-highlighted {
648 | text-style: bold;
649 | background: #283048;
650 | }
651 | & > .option-list--option-hover {
652 | background: #21283c;
653 | }
654 | & > .option-list--option-hover-highlighted {
655 | background: #21283c;
656 | text-style: none;
657 | }
658 | &:focus > .option-list--option-hover-highlighted {
659 | background: #21283c;
660 | text-style: none;
661 | }
662 | }
--------------------------------------------------------------------------------
/dolphie/Modules/CommandManager.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from dolphie.DataTypes import ConnectionSource
4 |
5 |
class CommandManager:
    """
    Registry of keyboard commands and their help text, per connection source.

    Each entry maps an internal key name to its display key ("human_key") and a
    description shown on the help screen. Replay modes have their own reduced
    command sets stored under the string keys "mysql_replay"/"proxysql_replay".
    """

    def __init__(self):
        self.command_keys = {
            ConnectionSource.mysql: {
                "Commands": {
                    "1": {"human_key": "1", "description": "Toggle panel Dashboard"},
                    "2": {"human_key": "2", "description": "Toggle panel Processlist"},
                    "3": {"human_key": "3", "description": "Toggle panel Metric Graphs"},
                    "4": {"human_key": "4", "description": "Toggle panel Replication/Replicas"},
                    "5": {"human_key": "5", "description": "Toggle panel Metadata Locks"},
                    "6": {"human_key": "6", "description": "Toggle panel DDLs"},
                    "7": {"human_key": "7", "description": "Toggle panel Performance Schema Metrics"},
                    "8": {"human_key": "8", "description": "Toggle panel Statements Summary Metrics"},
                    "placeholder_1": {"human_key": "", "description": ""},
                    "grave_accent": {"human_key": "`", "description": "Open tab setup"},
                    "plus": {"human_key": "+", "description": "Create a new tab"},
                    "minus": {"human_key": "-", "description": "Remove the current tab"},
                    "equals_sign": {"human_key": "=", "description": "Rename the current tab"},
                    "D": {"human_key": "D", "description": "Disconnect from the tab's host"},
                    "ctrl+a": {"human_key": "ctrl+a", "description": "Switch to the previous tab"},
                    "ctrl+d": {"human_key": "ctrl+d", "description": "Switch to the next tab"},
                    "placeholder_2": {"human_key": "", "description": ""},
                    "a": {"human_key": "a", "description": "Toggle additional processlist columns"},
                    "A": {
                        "human_key": "A",
                        "description": "Toggle query digest text/sample text in Statements Summary panel",
                    },
                    "C": {
                        "human_key": "C",
                        "description": "Toggle display of threads with concurrency tickets in Processlist panel",
                    },
                    "i": {"human_key": "i", "description": "Toggle display of idle threads"},
                    "T": {
                        "human_key": "T",
                        "description": "Toggle display of threads that only have an active transaction",
                    },
                    "p": {"human_key": "p", "description": "Toggle pause for refreshing of panels"},
                    "P": {
                        "human_key": "P",
                        "description": (
                            "Toggle between Information Schema and Performance Schema for the Processlist panel"
                        ),
                    },
                    "s": {"human_key": "s", "description": "Toggle sorting for Age in Processlist panel"},
                    "placeholder_3": {"human_key": "", "description": ""},
                    "l": {"human_key": "l", "description": "Display the most recent deadlock"},
                    "o": {"human_key": "o", "description": "Display output from SHOW ENGINE INNODB STATUS"},
                    "m": {"human_key": "m", "description": "Display memory usage"},
                    "d": {"human_key": "d", "description": "Display all databases"},
                    "e": {"human_key": "e", "description": "Display error log from Performance Schema"},
                    "t": {
                        "human_key": "t",
                        "description": "Display details of a thread along with an EXPLAIN of its query",
                    },
                    "u": {"human_key": "u", "description": "Display active connected users and their statistics"},
                    "v": {"human_key": "v", "description": "Display variables from SHOW GLOBAL VARIABLES"},
                    "z": {"human_key": "z", "description": "Display all entries in the host cache"},
                    "Z": {
                        "human_key": "Z",
                        "description": (
                            "Display table sizes and fragmentation for all databases - "
                            "[$yellow]Heed caution if you have a lot of tables![/$yellow]"
                        ),
                    },
                    "placeholder_4": {"human_key": "", "description": ""},
                    "c": {"human_key": "c", "description": "Clear all filters set"},
                    "f": {"human_key": "f", "description": "Filter threads by field(s)"},
                    "E": {"human_key": "E", "description": "Export the processlist to a CSV file"},
                    "k": {"human_key": "k", "description": "Kill thread(s)"},
                    "M": {"human_key": "M", "description": "Maximize a panel"},
                    "q": {"human_key": "q", "description": "Quit"},
                    "r": {"human_key": "r", "description": "Set the refresh interval"},
                    "R": {"human_key": "R", "description": "Reset all metrics"},
                    "space": {
                        "human_key": "space",
                        "description": "Force a manual refresh of all panels except replicas",
                    },
                }
            },
            ConnectionSource.proxysql: {
                "Commands": {
                    "1": {"human_key": "1", "description": "Toggle panel Dashboard"},
                    "2": {"human_key": "2", "description": "Toggle panel Processlist"},
                    "3": {"human_key": "3", "description": "Toggle panel Metric Graphs"},
                    "4": {"human_key": "4", "description": "Toggle panel Hostgroup Summary"},
                    "5": {"human_key": "5", "description": "Toggle panel Query Rules"},
                    "6": {"human_key": "6", "description": "Toggle panel Command Statistics"},
                    "placeholder_1": {"human_key": "", "description": ""},
                    "grave_accent": {"human_key": "`", "description": "Open tab setup"},
                    "plus": {"human_key": "+", "description": "Create a new tab"},
                    "minus": {"human_key": "-", "description": "Remove the current tab"},
                    "equals_sign": {"human_key": "=", "description": "Rename the current tab"},
                    "D": {"human_key": "D", "description": "Disconnect from the tab's host"},
                    "ctrl+a": {"human_key": "ctrl+a", "description": "Switch to the previous tab"},
                    "ctrl+d": {"human_key": "ctrl+d", "description": "Switch to the next tab"},
                    "placeholder_2": {"human_key": "", "description": ""},
                    "a": {"human_key": "a", "description": "Toggle additional processlist columns"},
                    "i": {"human_key": "i", "description": "Toggle display of idle threads"},
                    "p": {"human_key": "p", "description": "Toggle pause for refreshing of panels"},
                    "s": {"human_key": "s", "description": "Toggle sorting for Age in Processlist panel"},
                    "placeholder_3": {"human_key": "", "description": ""},
                    "e": {
                        "human_key": "e",
                        "description": "Display errors reported by backend servers during query execution",
                    },
                    "m": {"human_key": "m", "description": "Display memory usage"},
                    "t": {"human_key": "t", "description": "Display details of a thread"},
                    "u": {"human_key": "u", "description": "Display frontend users connected"},
                    "v": {"human_key": "v", "description": "Display variables from SHOW GLOBAL VARIABLES"},
                    "z": {"human_key": "z", "description": "Display all entries in the host cache"},
                    "placeholder_4": {"human_key": "", "description": ""},
                    "c": {"human_key": "c", "description": "Clear all filters set"},
                    "f": {"human_key": "f", "description": "Filter threads by field(s)"},
                    "E": {"human_key": "E", "description": "Export the processlist to a CSV file"},
                    "k": {"human_key": "k", "description": "Kill thread(s)"},
                    "M": {"human_key": "M", "description": "Maximize a panel"},
                    "q": {"human_key": "q", "description": "Quit"},
                    "r": {"human_key": "r", "description": "Set the refresh interval"},
                    "R": {"human_key": "R", "description": "Reset all metrics"},
                    "space": {
                        "human_key": "space",
                        "description": "Force a manual refresh of all panels except replicas",
                    },
                },
                "Terminology": {
                    "FE": {"description": "Frontend"},
                    "BE": {"description": "Backend"},
                    "Conn": {"description": "Connection"},
                    "CP": {"description": "Connection Pool"},
                    "MP": {"description": "Multiplex"},
                },
            },
            "mysql_replay": {
                "Commands": {
                    "1": {"human_key": "1", "description": "Toggle panel Dashboard"},
                    "2": {"human_key": "2", "description": "Toggle panel Processlist"},
                    "3": {"human_key": "3", "description": "Toggle panel Metric Graphs"},
                    "4": {"human_key": "4", "description": "Toggle panel Replication/Replicas"},
                    "5": {"human_key": "5", "description": "Toggle panel Metadata Locks"},
                    "7": {"human_key": "7", "description": "Toggle panel Performance Schema Metrics"},
                    "8": {"human_key": "8", "description": "Toggle panel Statements Summary Metrics"},
                    "placeholder_1": {"human_key": "", "description": ""},
                    "grave_accent": {"human_key": "`", "description": "Open tab setup"},
                    "plus": {"human_key": "+", "description": "Create a new tab"},
                    "minus": {"human_key": "-", "description": "Remove the current tab"},
                    "equals_sign": {"human_key": "=", "description": "Rename the current tab"},
                    "ctrl+a": {"human_key": "ctrl+a", "description": "Switch to the previous tab"},
                    "ctrl+d": {"human_key": "ctrl+d", "description": "Switch to the next tab"},
                    "placeholder_2": {"human_key": "", "description": ""},
                    "a": {"human_key": "a", "description": "Toggle additional processlist columns"},
                    "A": {
                        "human_key": "A",
                        "description": "Toggle query digest text/sample text in Statements Summary panel",
                    },
                    # Wording aligned with the live MySQL command set's "C" entry
                    "C": {
                        "human_key": "C",
                        "description": "Toggle display of threads with concurrency tickets in Processlist panel",
                    },
                    "T": {
                        "human_key": "T",
                        "description": "Toggle display of threads that only have an active transaction",
                    },
                    "s": {"human_key": "s", "description": "Toggle sorting for Age in Processlist panel"},
                    "placeholder_3": {"human_key": "", "description": ""},
                    "t": {"human_key": "t", "description": "Display details of a thread"},
                    "v": {"human_key": "v", "description": "Display global variables from SHOW GLOBAL VARIABLES"},
                    "V": {"human_key": "V", "description": "Display global variables that changed during recording"},
                    "placeholder_4": {"human_key": "", "description": ""},
                    "p": {"human_key": "p", "description": "Toggle pause of replay"},
                    "S": {"human_key": "S", "description": "Seek to a specific time in the replay"},
                    "left_square_bracket": {
                        # "[" must be escaped so Rich markup doesn't treat it as an open tag
                        "human_key": "\\[",
                        "description": "Seek to previous refresh interval in the replay",
                    },
                    "right_square_bracket": {
                        "human_key": "]",
                        "description": "Seek to next refresh interval in the replay",
                    },
                    "placeholder_5": {"human_key": "", "description": ""},
                    "c": {"human_key": "c", "description": "Clear all filters set"},
                    "f": {"human_key": "f", "description": "Filter threads by field(s)"},
                    "E": {"human_key": "E", "description": "Export the processlist to a CSV file"},
                    "M": {"human_key": "M", "description": "Maximize a panel"},
                    "q": {"human_key": "q", "description": "Quit"},
                    "r": {"human_key": "r", "description": "Set the refresh interval"},
                }
            },
            "proxysql_replay": {
                "Commands": {
                    "1": {"human_key": "1", "description": "Toggle panel Dashboard"},
                    "2": {"human_key": "2", "description": "Toggle panel Processlist"},
                    "3": {"human_key": "3", "description": "Toggle panel Metric Graphs"},
                    "4": {"human_key": "4", "description": "Toggle panel Hostgroup Summary"},
                    "placeholder_1": {"human_key": "", "description": ""},
                    "grave_accent": {"human_key": "`", "description": "Open tab setup"},
                    "plus": {"human_key": "+", "description": "Create a new tab"},
                    "minus": {"human_key": "-", "description": "Remove the current tab"},
                    "equals_sign": {"human_key": "=", "description": "Rename the current tab"},
                    "ctrl+a": {"human_key": "ctrl+a", "description": "Switch to the previous tab"},
                    "ctrl+d": {"human_key": "ctrl+d", "description": "Switch to the next tab"},
                    "placeholder_2": {"human_key": "", "description": ""},
                    "a": {"human_key": "a", "description": "Toggle additional processlist columns"},
                    "s": {"human_key": "s", "description": "Toggle sorting for Age in Processlist panel"},
                    "placeholder_3": {"human_key": "", "description": ""},
                    "t": {"human_key": "t", "description": "Display details of a thread"},
                    "v": {"human_key": "v", "description": "Display global variables from SHOW GLOBAL VARIABLES"},
                    "V": {"human_key": "V", "description": "Display global variables that changed during recording"},
                    "placeholder_4": {"human_key": "", "description": ""},
                    "p": {"human_key": "p", "description": "Toggle pause of replay"},
                    "S": {"human_key": "S", "description": "Seek to a specific time in the replay"},
                    "left_square_bracket": {
                        # Escaped like the mysql_replay entry so Rich doesn't parse "[" as a tag
                        "human_key": "\\[",
                        "description": "Seek to previous refresh interval in the replay",
                    },
                    "right_square_bracket": {
                        "human_key": "]",
                        "description": "Seek to next refresh interval in the replay",
                    },
                    "placeholder_5": {"human_key": "", "description": ""},
                    "c": {"human_key": "c", "description": "Clear all filters set"},
                    "f": {"human_key": "f", "description": "Filter threads by field(s)"},
                    "E": {"human_key": "E", "description": "Export the processlist to a CSV file"},
                    "M": {"human_key": "M", "description": "Maximize a panel"},
                    "q": {"human_key": "q", "description": "Quit"},
                    "r": {"human_key": "r", "description": "Set the refresh interval"},
                }
            },
        }

        # These are keys that we let go through no matter what
        self.exclude_keys = [
            "up",
            "down",
            "left",
            "right",
            "pageup",
            "pagedown",
            "home",
            "end",
            "tab",
            "enter",
            "grave_accent",
            "q",
            "question_mark",
            "plus",
            "minus",
            "equals_sign",
            "ctrl+a",
            "ctrl+d",
        ]

    def get_commands(self, replay_file: str, connection_source: ConnectionSource) -> Dict[str, Dict[str, str]]:
        """
        Return the command map for the given connection source.

        Args:
            replay_file (str): Path to a replay file; when truthy, the reduced
                replay command set for the source is returned instead.
            connection_source (ConnectionSource): The tab's connection source.

        Returns:
            Dict[str, Dict[str, str]]: Mapping of key name -> {"human_key", "description"};
                empty dict for an unknown source.
        """
        if replay_file:
            key = {ConnectionSource.mysql: "mysql_replay", ConnectionSource.proxysql: "proxysql_replay"}.get(
                connection_source, connection_source
            )
        else:
            key = connection_source

        # Default to {} so callers never have to handle None (matches the annotated return type)
        return self.command_keys.get(key, {}).get("Commands", {})
266 |
--------------------------------------------------------------------------------
/dolphie/Modules/Functions.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | from decimal import Decimal
4 |
5 | import charset_normalizer
6 | from pygments.style import Style
7 | from pygments.token import (
8 | Comment,
9 | Error,
10 | Generic,
11 | Keyword,
12 | Name,
13 | Number,
14 | Operator,
15 | Punctuation,
16 | String,
17 | Whitespace,
18 | )
19 | from rich.markup import escape as markup_escape
20 | from rich.syntax import Syntax
21 |
22 |
class NordModifiedTheme(Style):
    """Pygments syntax-highlighting style: a modified Nord-style palette for SQL rendering."""

    # Dark base/background shades (nord0 doubles as the theme's background color)
    nord0 = "#0f1525"
    nord1 = "#3b4252"
    nord2 = "#434c5e"
    nord3 = "#4c566a"
    nord3_bright = "#616e87"

    # Light foreground shades
    nord4 = "#d8dee9"
    nord5 = "#e5e9f0"
    nord6 = "#eceff4"

    # Cool accent shades (keywords, functions, classes)
    nord7 = "#8fbcbb"
    nord8 = "#88c0d0"
    nord9 = "#879bca"
    nord10 = "#5e81ac"

    # Warm/colored accent shades (errors, strings, numbers)
    nord11 = "#bf616a"
    nord12 = "#d08770"
    nord13 = "#81c194"
    nord14 = "#ac8bdd"
    nord15 = "#ca87a5"

    background_color = nord0
    default = nord4

    # Pygments token-type -> color/style mapping consumed when highlighting queries
    styles = {
        Whitespace: nord4,
        Comment: f"italic {nord3_bright}",
        Comment.Preproc: nord10,
        Keyword: f"bold {nord9}",
        Keyword.Pseudo: f"nobold {nord9}",
        Keyword.Type: f"nobold {nord9}",
        Operator: nord9,
        Operator.Word: f"bold {nord9}",
        Name: nord4,
        Name.Builtin: nord9,
        Name.Function: nord8,
        Name.Class: nord7,
        Name.Namespace: nord7,
        Name.Exception: nord11,
        Name.Variable: nord4,
        Name.Constant: nord7,
        Name.Label: nord7,
        Name.Entity: nord12,
        Name.Attribute: nord7,
        Name.Tag: nord9,
        Name.Decorator: nord12,
        Punctuation: nord6,
        String: nord14,
        String.Doc: nord3_bright,
        String.Interpol: nord14,
        String.Escape: nord13,
        String.Regex: nord13,
        String.Symbol: nord14,
        String.Other: nord14,
        Number: nord15,
        Generic.Heading: f"bold {nord8}",
        Generic.Subheading: f"bold {nord8}",
        Generic.Deleted: nord11,
        Generic.Inserted: nord14,
        Generic.Error: nord11,
        Generic.Emph: "italic",
        Generic.Strong: "bold",
        Generic.Prompt: f"bold {nord3}",
        Generic.Output: nord4,
        Generic.Traceback: nord11,
        Error: nord11,
    }
91 |
92 |
def format_query(query: str, minify: bool = True) -> Syntax:
    """
    Render a SQL query as a Rich Syntax object using the Nord theme.

    Args:
        query (str): The SQL text; empty/None yields an empty Syntax object.
        minify (bool): Collapse whitespace and escape Rich markup when True.

    Returns:
        Syntax: The highlighted query, word-wrapped.
    """
    if not query:
        return Syntax(code="", lexer="sql", word_wrap=True, theme=NordModifiedTheme)

    # Reuse minify_query so whitespace collapsing/markup escaping stays consistent file-wide
    if minify:
        query = minify_query(query)

    return Syntax(code=query, lexer="sql", word_wrap=True, theme=NordModifiedTheme)
102 |
103 |
def minify_query(query: str) -> str:
    """Collapse all whitespace runs in a query to single spaces and escape Rich markup."""
    if query:
        collapsed = re.sub(r"\s+", " ", query)
        return markup_escape(collapsed)

    return ""
109 |
110 |
def format_bytes(bytes_value, color=True, decimal=2):
    """
    Convert a byte count into a human-readable string (B/KB/MB/GB/TB).

    Args:
        bytes_value: Number of bytes; strings are returned unchanged.
        color (bool): Wrap the unit in [highlight] markup when True.
        decimal (int): Rounding precision for the converted value.

    Returns:
        str: Formatted value, or "0" when the value is zero.
    """
    # Pre-formatted string values pass straight through
    if isinstance(bytes_value, str):
        return bytes_value

    suffixes = ("B", "KB", "MB", "GB", "TB")
    index = 0
    value = bytes_value
    while value >= 1024 and index < len(suffixes) - 1:
        value /= 1024
        index += 1

    text = f"{value:.{decimal}f}"

    # Drop an all-zero fraction (e.g. "5.00" -> "5") so whole numbers read cleanly
    zero_fraction = "." + "0" * decimal
    if text.endswith(zero_fraction):
        text = text[: len(text) - decimal - 1]

    if value == 0:
        return "0"

    if color:
        return f"{text}[highlight]{suffixes[index]}[/highlight]"

    return f"{text}{suffixes[index]}"
135 |
136 |
def format_time(time: int, picoseconds=False):
    """
    Render a duration as HH:MM:SS.

    Args:
        time (int): Duration in seconds, or picoseconds when picoseconds=True;
            None returns "N/A".
        picoseconds (bool): Interpret `time` as picoseconds.

    Returns:
        str: Zero-padded HH:MM:SS string.
    """
    if time is None:
        return "N/A"

    total_seconds = int(time / 1e12) if picoseconds else int(time)

    minutes, secs = divmod(total_seconds, 60)
    hours, minutes = divmod(minutes, 60)

    return f"{hours:02d}:{minutes:02d}:{secs:02d}"
147 |
148 |
def format_picoseconds(ps: int) -> str:
    """Format a picosecond duration with the largest unit that fits (e.g. '1.50[highlight]ms')."""
    second = 1_000_000_000_000  # picoseconds per second

    scale = (
        ("d", second * 86_400),
        ("h", second * 3_600),
        ("m", second * 60),
        ("s", second),
        ("ms", 1_000_000_000),
        ("µs", 1_000_000),
        ("ns", 1_000),
        ("ps", 1),
    )

    for unit, factor in scale:
        if ps >= factor:
            # Highlight tag is intentionally unclosed; Rich applies it through the unit suffix
            return f"{ps / factor:.2f}[highlight]{unit}"

    return "[dark_gray]0"
166 |
167 |
def load_host_cache_file(host_cache_file: str):
    """
    Load ip=hostname mappings from a host cache file.

    Args:
        host_cache_file (str): Path to the cache file; a missing file yields {}.

    Returns:
        dict: Mapping of IP/host string to hostname.

    Raises:
        Exception: If a non-empty line is not in the 'ip=hostname' format.
    """
    host_cache = {}
    if os.path.exists(host_cache_file):
        with open(host_cache_file) as file:
            for line in file:
                line = line.strip()

                # Skip blank lines so stray spacing/trailing newlines don't abort the load
                if not line:
                    continue

                error_message = f"Host cache entry '{line}' is not properly formatted! Format: ip=hostname"

                if "=" not in line:
                    raise Exception(error_message)

                host, hostname = line.split("=", maxsplit=1)
                host = host.strip()
                hostname = hostname.strip()

                if not host or not hostname:
                    raise Exception(error_message)

                host_cache[host] = hostname

    return host_cache
189 |
190 |
def detect_encoding(text):
    """
    Auto-detect the character encoding of raw query bytes.

    BLOB/BINARY data can be involved, so the encoding can be anything; letting
    pymysql default to unicode caused consistent crashes on utf8 data, hence
    the detection step.
    """
    detected = charset_normalizer.detect(text)["encoding"]

    if detected is None:
        return "latin1"
    if detected == "utf-16be":
        # NOTE(review): utf-16be detections are remapped to utf-8 — presumably a
        # common misdetection for this data; confirm before changing
        return "utf-8"

    return detected
204 |
205 |
def round_num(n, decimal=2):
    """Round n to `decimal` places, returning the whole number when there is no fractional part."""
    value = Decimal(n)
    integral = value.to_integral()
    if value == integral:
        return integral

    return round(value.normalize(), decimal)
209 |
210 |
# This is from https://pypi.org/project/numerize
def format_number(n, decimal=2, color=True):
    """
    Abbreviate a number with a metric-style suffix (e.g. 1500 -> '1.5K').

    Args:
        n: Number (or numeric string) to format; falsy values return "0".
        decimal (int): Rounding precision for abbreviated values.
        color (bool): Wrap the suffix in [highlight] markup when True.

    Returns:
        str: The formatted number; non-numeric strings are returned unchanged,
            and values outside the suffix table are returned via str().
    """
    if not n or n == "0":
        return "0"

    # fmt: off
    sufixes = ["", "K", "M", "B", "T", "Qa", "Qu", "S", "Oc", "No",
               "D", "Ud", "Dd", "Td", "Qt", "Qi", "Se", "Od", "Nd", "V",
               "Uv", "Dv", "Tv", "Qv", "Qx", "Sx", "Ox", "Nx", "Tn", "Qa",
               "Qu", "S", "Oc", "No", "D", "Ud", "Dd", "Td", "Qt", "Qi",
               "Se", "Od", "Nd", "V", "Uv", "Dv", "Tv", "Qv", "Qx", "Sx",
               "Ox", "Nx", "Tn", "x", "xx", "xxx", "X", "XX", "XXX", "END"]

    sci_expr = [1e0, 1e3, 1e6, 1e9, 1e12, 1e15, 1e18, 1e21, 1e24, 1e27,
                1e30, 1e33, 1e36, 1e39, 1e42, 1e45, 1e48, 1e51, 1e54, 1e57,
                1e60, 1e63, 1e66, 1e69, 1e72, 1e75, 1e78, 1e81, 1e84, 1e87,
                1e90, 1e93, 1e96, 1e99, 1e102, 1e105, 1e108, 1e111, 1e114, 1e117,
                1e120, 1e123, 1e126, 1e129, 1e132, 1e135, 1e138, 1e141, 1e144, 1e147,
                1e150, 1e153, 1e156, 1e159, 1e162, 1e165, 1e168, 1e171, 1e174, 1e177]
    # fmt: on

    # Convert string to a number format if needed
    if isinstance(n, str):
        try:
            n = float(n)
        except ValueError:
            return n

    # NOTE(review): the sign is discarded here, so negative inputs format as
    # positive — preserved as-is; confirm this is intended for metric deltas
    n = abs(n)

    # Stop one short of the end so sci_expr[x + 1] can never go out of bounds
    for x in range(len(sci_expr) - 1):
        if sci_expr[x] <= n < sci_expr[x + 1]:
            sufix = sufixes[x]
            if n >= 1e3:
                num = str(round_num(n / sci_expr[x], decimal))
            else:
                num = str(round_num(n, 0))
            if color:
                return f"{num}[highlight]{sufix}[/highlight]" if sufix else num
            return f"{num}{sufix}" if sufix else num

    # Values outside the table (< 1, or >= 1e177) previously returned None or
    # raised IndexError; return them as plain strings instead
    return str(n)
251 |
252 |
def format_sys_table_memory(data):
    """
    Normalize a sys schema memory string (e.g. "1.50 GiB") into a value with
    a highlighted short suffix. Input that isn't "value unit" is returned
    unchanged.
    """
    parts = data.strip().split(" ")
    if len(parts) != 2:
        return data

    value = parts[0]
    suffix = parts[1][:1]

    if value == "0":
        suffix = ""
    elif suffix == "b":
        # "bytes" -> "B"
        suffix = "B"
    else:
        # e.g. "GiB" -> "G" -> "GB"
        suffix = f"{suffix}B"

    return f"{value}[highlight]{suffix}"
268 |
269 |
def escape_markup(text: str) -> str:
    """
    Escape Rich markup characters in the given text.

    Args:
        text (str): The text to escape.

    Returns:
        str: The escaped text, with every "[" backslash-prefixed.
    """
    return r"\[".join(text.split("["))
281 |
--------------------------------------------------------------------------------
/dolphie/Modules/ManualException.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 | from rich import box
3 | from rich.table import Table
4 |
5 | from dolphie.Modules.Functions import format_query, minify_query
6 |
7 |
class ManualException(Exception):
    """
    Exception carrying a reason, an optional failing query, and an optional
    error code, with a helper to render itself as a Rich table and log it.
    """

    def __init__(self, reason: str, query: str = "", code: int = None):
        self.reason = reason
        self.query = query
        self.code = code

    def output(self):
        """Build and return a Rich table describing the error; also logs it."""
        error_table = Table(box=box.SQUARE, show_header=True, style="#ec8888")
        error_table.add_column("MySQL Connection Error", overflow="fold")

        log_parts = []

        if self.query:
            error_table.add_row("[red]Failed to execute query:[/red]")
            error_table.add_row(format_query(self.query, minify=False))
            error_table.add_row("")

            log_parts.append(f"Query: {minify_query(self.query)}")

        if self.reason:
            # pymysql for some reason returns "ny connections" instead of "Too many connections"
            if isinstance(self.reason, str):
                self.reason = self.reason.replace("ny connections", "Too many connections")

            error_table.add_row(self.reason)
            log_parts.append(self.reason)

        if log_parts:
            logger.critical("\n".join(log_parts))

        return error_table
40 |
--------------------------------------------------------------------------------
/dolphie/Modules/MySQL.py:
--------------------------------------------------------------------------------
1 | import re
2 | import string
3 | import time
4 | from ssl import SSLError
5 |
6 | import pymysql
7 | from loguru import logger
8 | from textual.app import App
9 |
10 | from dolphie.DataTypes import ConnectionSource
11 | from dolphie.Modules.ManualException import ManualException
12 | from dolphie.Modules.Queries import MySQLQueries, ProxySQLQueries
13 |
14 |
class Database:
    """
    Wrapper around a pymysql connection to MySQL/MariaDB or ProxySQL.

    Handles connecting (with source detection), decoding potentially-binary
    result data, and automatic reconnection with exponential backoff when a
    query fails due to a lost connection.
    """

    def __init__(
        self,
        app: App,
        host: str,
        user: str,
        password: str,
        socket: str,
        port: int,
        ssl: str,
        save_connection_id: bool = True,
        auto_connect: bool = True,
        daemon_mode: bool = False,
    ):
        self.app = app
        self.host = host
        self.user = user
        self.password = password
        self.socket = socket
        self.port = port
        self.ssl = ssl
        self.save_connection_id = save_connection_id
        self.daemon_mode = daemon_mode

        self.connection: pymysql.Connection = None
        self.connection_id: int = None
        self.source: ConnectionSource = None
        self.is_running_query: bool = False
        self.has_connected: bool = False

        # Pre-compile regex pattern to filter non-printable characters
        self.non_printable_regex = re.compile(f"[^{re.escape(string.printable)}]")

        # Daemon mode should effectively retry forever
        if daemon_mode:
            self.max_reconnect_attempts: int = 999999999
        else:
            self.max_reconnect_attempts: int = 3

        if auto_connect:
            self.connect()

    def connect(self, reconnect_attempt: bool = False):
        """
        Establish the connection and detect whether the server is MySQL or ProxySQL.

        Args:
            reconnect_attempt: When True, failures are logged/notified instead of
                raised so the caller's retry loop can continue.

        Raises:
            ManualException: On connection failure (when not a reconnect attempt),
                an invalid SSL file path, or an SSL error.
        """
        try:
            self.connection = pymysql.connect(
                host=self.host,
                user=self.user,
                passwd=self.password,
                unix_socket=self.socket,
                port=int(self.port),
                use_unicode=False,
                ssl=self.ssl,
                autocommit=True,
                connect_timeout=5,
                program_name="Dolphie",
            )
            self.cursor = self.connection.cursor(pymysql.cursors.DictCursor)

            # If the query is successful, then the connection is to ProxySQL
            try:
                self.cursor.execute("SELECT @@admin-version")
                self.source = ConnectionSource.proxysql
            except Exception:
                self.source = ConnectionSource.mysql

            # Get connection ID for processlist filtering
            if self.save_connection_id:
                self.connection_id = self.connection.thread_id()

            # We don't want any SQL modes to be set to avoid unexpected behavior between MySQL & MariaDB
            if self.source == ConnectionSource.mysql:
                self.execute("SET SESSION sql_mode=''")

            logger.info(f"Connected to {self.source} with Process ID {self.connection_id}")
            self.has_connected = True
        except pymysql.Error as e:
            # pymysql errors usually carry (code, message) but can have a single arg,
            # so guard the args[1] access (previously an IndexError risk here)
            error_message = e.args[1] if len(e.args) > 1 else str(e.args[0])

            if reconnect_attempt:
                logger.error(f"Failed to reconnect to {self.source}: {error_message}")
                escaped_error_message = error_message.replace("[", "\\[")
                self.app.notify(
                    (
                        f"[$b_light_blue]{self.host}:{self.port}[/$b_light_blue]: "
                        f"Failed to reconnect to MySQL: {escaped_error_message}"
                    ),
                    title="MySQL Reconnection Failed",
                    severity="error",
                    timeout=10,
                )
            else:
                if len(e.args) == 1:
                    raise ManualException(e.args[0])
                else:
                    raise ManualException(e.args[1])
        except FileNotFoundError:  # Catch SSL file path errors
            raise ManualException("SSL certificate file path isn't valid!")
        except SSLError as e:
            raise ManualException(f"SSL error: {e}")

    def close(self):
        """Close the underlying connection if it is open."""
        if self.is_connected():
            self.connection.close()

    def is_connected(self) -> bool:
        """Return True when a connection object exists and reports itself open."""
        return self.connection and self.connection.open

    def _process_row(self, row):
        """Decode every value of a result row (see _decode_value)."""
        return {field: self._decode_value(value) for field, value in row.items()}

    def _decode_value(self, value):
        """
        Decode bytes into printable text: UTF-8 first, then Latin-1, then a hex
        dump comment as a last resort. Non-bytes values pass through unchanged.
        """
        if isinstance(value, (bytes, bytearray)):
            # First attempt: UTF-8
            try:
                decoded_value = value.decode("utf-8")
            except UnicodeDecodeError:
                # Second attempt: Latin-1
                try:
                    decoded_value = value.decode("latin-1")
                except UnicodeDecodeError:
                    # Fallback: Hex representation
                    return f"/* Failed to decode query, returning hex: {value.hex()} */"

            # Replace non-printable characters so they can't corrupt the TUI
            return self.non_printable_regex.sub("?", decoded_value)

        return value

    def fetchall(self):
        """Fetch and decode all rows from the last query; [] when not connected."""
        if not self.is_connected():
            return []

        rows = self.cursor.fetchall()
        return [self._process_row(row) for row in rows] if rows else []

    def fetchone(self):
        """Fetch and decode one row from the last query; {} when none/not connected."""
        if not self.is_connected():
            return {}

        row = self.cursor.fetchone()
        return self._process_row(row) if row else {}

    def fetch_value_from_field(self, query, field=None, values=None):
        """
        Run a query and return a single decoded column value.

        Args:
            query: SQL to execute.
            field: Column to read; defaults to the first column of the row.
            values: Optional parameters passed to the cursor.

        Returns:
            The decoded value, or None when there is no data/connection.
        """
        if not self.is_connected():
            return None

        self.execute(query, values)
        data = self.cursor.fetchone()

        if not data:
            return None

        field = field or next(iter(data))  # Use field if provided, otherwise get first field
        value = data.get(field)
        return self._decode_value(value)

    def fetch_status_and_variables(self, command):
        """
        Run a named query from MySQLQueries/ProxySQLQueries and shape the result.

        Returns a {name: value} dict for status/variables/mysql_stats and
        innodb_metrics commands; None for other commands or when not connected.
        """
        if not self.is_connected():
            return None

        self.execute(
            getattr(ProxySQLQueries, command)
            if self.source == ConnectionSource.proxysql
            else getattr(MySQLQueries, command)
        )
        data = self.fetchall()

        if command in {"status", "variables", "mysql_stats"}:
            return {
                row["Variable_name"]: int(row["Value"]) if row["Value"].isnumeric() else row["Value"] for row in data
            }
        elif command == "innodb_metrics":
            return {row["NAME"]: int(row["COUNT"]) for row in data}

    def execute(self, query, values=None, ignore_error=False):
        """
        Execute a query, reconnecting with exponential backoff on lost connections.

        Args:
            query: SQL to execute (gets a "/* Dolphie */" prefix for non-ProxySQL).
            values: Optional parameters passed to the cursor.
            ignore_error: When True, swallow pymysql errors and return None.

        Returns:
            The cursor's row count from execute(), or None if it could not run.

        Raises:
            ManualException: On a non-connection error, or when reconnection
                attempts are exhausted.
        """
        if not self.is_connected():
            return None

        if self.is_running_query:
            self.app.notify(
                "Another query is already running, please repeat action",
                title="Unable to run multiple queries at the same time",
                severity="error",
                timeout=10,
            )
            return None

        # Prefix all queries with Dolphie so they can be easily identified in the processlist from other people
        if self.source != ConnectionSource.proxysql:
            query = "/* Dolphie */ " + query

        # Last pymysql error code seen; initialized so the post-loop raise can't
        # hit an unbound name (previously a NameError risk)
        error_code = None

        for attempt_number in range(self.max_reconnect_attempts):
            self.is_running_query = True
            error_message = None

            try:
                rows = self.cursor.execute(query, values)
                self.is_running_query = False

                return rows
            except AttributeError:
                # If the cursor is not defined, reconnect and try again
                self.is_running_query = False

                self.close()
                self.connect()

                time.sleep(1)
            except pymysql.Error as e:
                self.is_running_query = False

                if ignore_error:
                    return None

                # pymysql errors are usually (code, message) but can be a single
                # arg; guard args[1] (previously an IndexError risk)
                error_code = e.args[0]
                if len(e.args) > 1 and e.args[1]:
                    error_message = e.args[1]

                # Check if the error is due to a connection issue or is in daemon mode and already has
                # an established connection. If so, attempt to exponential backoff reconnect
                if error_code in (0, 2006, 2013, 2055) or (self.daemon_mode and self.has_connected):
                    # 0: Not connected to MySQL
                    # 2006: MySQL server has gone away
                    # 2013: Lost connection to MySQL server during query
                    # 2055: Lost connection to MySQL server at hostname

                    if error_message:
                        logger.error(
                            f"{self.source} has lost its connection: {error_message}, attempting to reconnect..."
                        )
                        # Escape [ and ] characters in the error message
                        escaped_error_message = error_message.replace("[", "\\[")
                        self.app.notify(
                            f"[$b_light_blue]{self.host}:{self.port}[/$b_light_blue]: {escaped_error_message}",
                            title="MySQL Connection Lost",
                            severity="error",
                            timeout=10,
                        )

                    self.close()
                    self.connect(reconnect_attempt=True)

                    if not self.connection.open:
                        # Exponential backoff
                        time.sleep(min(1 * (2**attempt_number), 20))  # Cap the wait time at 20 seconds

                        # Skip the rest of the loop
                        continue

                    self.app.notify(
                        f"[$b_light_blue]{self.host}:{self.port}[/$b_light_blue]: Successfully reconnected",
                        title="MySQL Connection Created",
                        severity="success",
                        timeout=10,
                    )

                    # Retry the query
                    return self.execute(query, values)
                else:
                    raise ManualException(error_message, query=query, code=error_code)

        if not self.connection.open:
            raise ManualException(
                f"Failed to reconnect to {self.source} after {self.max_reconnect_attempts} attempts",
                query=query,
                code=error_code,
            )
281 |
--------------------------------------------------------------------------------
/dolphie/Modules/PerformanceSchemaMetrics.py:
--------------------------------------------------------------------------------
1 | import re
2 | from typing import Any, Dict, List
3 |
4 | from dolphie.Modules.Functions import minify_query
5 |
6 |
class PerformanceSchemaMetrics:
    """
    Tracks integer metrics per instance from performance_schema query results,
    maintaining cumulative deltas and last-sample deltas between polls.

    internal_data keeps running totals/deltas for every instance seen;
    filtered_data is the trimmed view (only instances with activity, plus
    combined/aggregated file I/O rows) that the panels render.
    """

    def __init__(self, query_data: List[Dict[str, Any]], metric_name: str, metric_key: str):
        # metric_name selects special handling ("file_io", "statements_summary")
        self.metric_name = metric_name
        # metric_key is the column that uniquely identifies an instance/row
        self.metric_key = metric_key

        # These are integer columns that should be ignored for delta calculations
        self.ignore_int_columns = ["quantile_95", "quantile_99"]

        # Compact per-instance view consumed by the panels:
        # {instance: {metric: {"t": total, "d": delta, "d_last_sample": ...}}}
        self.filtered_data: Dict[str, Dict[str, Dict[str, int]]] = {}
        # Seed running state from the first sample; every integer column
        # (except ignored ones) becomes a tracked metric with zeroed deltas
        self.internal_data: Dict[str, Dict[str, Dict[str, Any]]] = {
            row[self.metric_key]: {
                "event_name": row.get("EVENT_NAME"),
                "metrics": {
                    metric: {"total": value, "delta": 0, "delta_last_sample": 0}
                    for metric, value in row.items()
                    if isinstance(value, int) and metric not in self.ignore_int_columns
                },
            }
            for row in query_data
        }

        # Matches schema/table datafile paths (frm/ibd/MYD/...); NOTE(review):
        # compiled but not referenced in this class as shown — verify callers
        self.table_pattern = re.compile(r"([^/]+)/([^/]+)\.(frm|ibd|MYD|MYI|CSM|CSV|par)$")
        # Matches InnoDB undo log file names like ".../undo_001"
        self.undo_logs_pattern = re.compile(r"undo_\d+$")

        # Event names whose per-file rows are rolled up under one display name
        self.events_to_combine = {
            "wait/io/file/innodb/innodb_temp_file": "Temporary files",
            "wait/io/file/sql/binlog": "Binary logs",
            "wait/io/file/sql/relaylog": "Relay logs",
            "wait/io/file/sql/io_cache": "IO cache",
            "wait/io/file/innodb/innodb_dblwr_file": "Doublewrite buffer",
            "wait/io/file/innodb/innodb_log_file": "InnoDB redo logs",
            "wait/io/file/sql/hash_join": "Hash joins",
        }

    def update_internal_data(self, query_data: List[Dict[str, int]]):
        """
        Fold a new polling sample into the running state.

        For each instance, computes the per-metric delta against the stored
        total, accumulates positive deltas, rebuilds filtered_data for
        instances with activity, and drops instances that disappeared.
        """
        # Track instances and remove missing ones
        current_instance_names = {row[self.metric_key] for row in query_data}
        instances_to_remove = set(self.internal_data) - current_instance_names

        # Process current query data
        for row in query_data:
            instance_name = row[self.metric_key]
            metrics = {
                metric: value
                for metric, value in row.items()
                if isinstance(value, int) and metric not in self.ignore_int_columns
            }

            # Initialize instance in internal_data if not present
            if instance_name not in self.internal_data:
                self.internal_data[instance_name] = {
                    "event_name": row.get("EVENT_NAME"),
                    "metrics": {
                        metric: {"total": value, "delta": 0, "delta_last_sample": 0}
                        for metric, value in metrics.items()
                    },
                }

            deltas_changed = False
            all_deltas_zero = True

            # Update deltas for each metric
            # NOTE(review): a metric column appearing for an already-known
            # instance would KeyError here — assumes stable columns per query
            for metric, current_value in metrics.items():
                metric_data = self.internal_data[instance_name]["metrics"][metric]
                initial_value = metric_data["total"]
                delta = current_value - initial_value

                metric_data["total"] = current_value
                # Negative deltas (e.g. counter reset) are ignored rather than
                # accumulated
                if delta > 0:
                    metric_data["delta"] += delta
                    metric_data["delta_last_sample"] = delta
                    deltas_changed = True

                if metric_data["delta"] > 0:
                    all_deltas_zero = False

            # Update filtered_data with new values if deltas changed or instance is new
            if deltas_changed or instance_name not in self.filtered_data:
                self.filtered_data[instance_name] = {}

                for metric, values in self.internal_data[instance_name]["metrics"].items():
                    # Update total with the new value (whether or not delta is positive)
                    total = values["total"]

                    # Only add delta if it's greater than 0
                    delta = values["delta"] if values["delta"] > 0 else 0
                    delta_last_sample = values["delta_last_sample"] if values["delta_last_sample"] > 0 else 0

                    # Only include the metric in filtered_data if it has a delta greater than 0
                    if delta > 0:
                        self.filtered_data[instance_name][metric] = {
                            "t": total,
                            "d": delta,
                            "d_last_sample": delta_last_sample,
                        }
                    else:
                        self.filtered_data[instance_name][metric] = {"t": total}

                # Statements summary rows also carry non-integer display columns
                # that the panel needs; copy them through once per rebuild
                if (
                    self.metric_name == "statements_summary"
                    and "schema_name" not in self.filtered_data[instance_name]
                ):
                    self.filtered_data[instance_name]["schema_name"] = row.get("schema_name")
                    self.filtered_data[instance_name]["digest_text"] = minify_query(row.get("digest_text"))
                    self.filtered_data[instance_name]["query_sample_text"] = minify_query(
                        row.get("query_sample_text")
                    )
                    self.filtered_data[instance_name]["quantile_95"] = row.get("quantile_95")
                    self.filtered_data[instance_name]["quantile_99"] = row.get("quantile_99")

            # An instance with no accumulated activity is hidden from the view
            if all_deltas_zero:
                self.filtered_data.pop(instance_name, None)

        # Remove instances no longer in the query data
        for instance_name in instances_to_remove:
            del self.internal_data[instance_name]

        if self.metric_name == "file_io":
            self.aggregate_and_combine_data()

    def aggregate_and_combine_data(self):
        """
        Roll up file I/O instances into display groups (undo logs, binlogs,
        redo logs, etc.) and rebuild their filtered_data entries.
        """
        combined_results = {}

        # Aggregate deltas for combined events and instances matching the regex
        for instance_name, instance_data in self.internal_data.items():
            event_name = instance_data["event_name"]

            # Determine the target name based on instance name pattern or specific event name
            if self.undo_logs_pattern.search(instance_name):
                target_name = "Undo Logs"
            elif event_name in self.events_to_combine:
                target_name = self.events_to_combine[event_name]
            else:
                continue  # Skip if it doesn't match any pattern or event to combine

            # Remove original instance from filtered_data if it exists
            self.filtered_data.pop(instance_name, None)

            # Initialize target in combined results if not already present
            target_metrics = combined_results.setdefault(target_name, {})

            # Accumulate metrics for each matched or combined event
            for metric_name, metric_data in instance_data["metrics"].items():
                combined_metric = target_metrics.setdefault(metric_name, {"total": 0, "delta": 0})
                combined_metric["total"] += metric_data["total"]
                combined_metric["delta"] += metric_data["delta"]

        # Update filtered_data with combined results only if SUM_TIMER_WAIT delta > 0
        for combined_name, combined_metrics in combined_results.items():
            # Skip if SUM_TIMER_WAIT delta is 0
            if combined_metrics.get("SUM_TIMER_WAIT", {}).get("delta", 0) > 0:
                self.filtered_data[combined_name] = {
                    metric_name: {"t": combined_data["total"], "d": combined_data["delta"]}
                    for metric_name, combined_data in combined_metrics.items()
                }

        # Clean up filtered_data by removing instances with SUM_TIMER_WAIT delta of 0
        self.filtered_data = {
            instance_name: instance_metrics
            for instance_name, instance_metrics in self.filtered_data.items()
            if instance_metrics.get("SUM_TIMER_WAIT", {}).get("d", 0) != 0
        }
169 |
--------------------------------------------------------------------------------
/dolphie/Panels/DDL.py:
--------------------------------------------------------------------------------
1 | from textual.widgets import DataTable
2 |
3 | from dolphie.Modules.Functions import format_bytes, format_time
4 | from dolphie.Modules.TabManager import Tab
5 |
6 |
def create_panel(tab: Tab) -> DataTable:
    """Rebuild the DDL panel's datatable from the tab's current DDL rows."""
    dolphie = tab.dolphie

    column_spec = {
        "processlist_id": {"name": "Thread ID", "width": 11, "format": None},
        "percentage_completed": {"name": "Completed", "width": 9, "format": None},
        "memory": {"name": "Memory", "width": 10, "format": "bytes"},
        "started_ago": {"name": "Current Time", "width": 12, "format": "time"},
        "estimated_remaining_time": {"name": "Remaining Time", "width": 14, "format": "time"},
        "state": {"name": "State", "width": None, "format": None},
    }

    datatable = tab.ddl_datatable
    datatable.clear(columns=True)

    for key, spec in column_spec.items():
        datatable.add_column(spec["name"], key=key, width=spec["width"])

    for ddl in dolphie.ddl:
        row = []

        for key, spec in column_spec.items():
            fmt = spec["format"]

            if fmt == "time":
                # Durations arrive in picoseconds
                row.append(format_time(ddl[key], picoseconds=True))
            elif fmt == "bytes":
                row.append(format_bytes(ddl[key]))
            else:
                row.append(ddl[key])

        datatable.add_row(*row, key=ddl["processlist_id"])

    # Panel title shows the current row count
    tab.ddl_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.ddl.name)} "
        f"([$highlight]{datatable.row_count}[/$highlight])"
    )
46 |
--------------------------------------------------------------------------------
/dolphie/Panels/Dashboard.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 | from rich.style import Style
4 | from rich.table import Table
5 |
6 | from dolphie.Modules.Functions import format_bytes, format_number
7 | from dolphie.Modules.MetricManager import MetricData
8 | from dolphie.Modules.MySQL import ConnectionSource
9 | from dolphie.Modules.TabManager import Tab
10 | from dolphie.Panels import Replication as ReplicationPanel
11 |
12 |
def create_panel(tab: Tab) -> Table:
    """
    Build and push all Dashboard panel sections (host info, system utilization,
    InnoDB, binary log, replication summary, per-second statistics) onto the
    tab's dashboard widgets.
    """
    dolphie = tab.dolphie

    global_status = dolphie.global_status
    global_variables = dolphie.global_variables
    binlog_status = dolphie.binlog_status

    table_title_style = Style(color="#bbc8e8", bold=True)

    ####################
    # Host Information #
    ####################
    table_information = Table(
        show_header=False,
        box=None,
        title=f"{dolphie.panels.get_key(dolphie.panels.dashboard.name)}Host Information",
        title_style=table_title_style,
    )

    if dolphie.replicaset:
        host_type = "InnoDB ReplicaSet"
    elif dolphie.innodb_cluster_read_replica:
        host_type = "InnoDB Cluster Read Replica"
    elif dolphie.innodb_cluster:
        host_type = "InnoDB Cluster"
    elif dolphie.group_replication:
        host_type = "Group Replication"
    elif dolphie.galera_cluster:
        host_type = "Galera Cluster"
    else:
        if dolphie.connection_source_alt == ConnectionSource.mariadb:
            host_type = "MariaDB"
        else:
            host_type = "MySQL"

    replicas = 0
    if dolphie.replica_manager.available_replicas:
        replicas = len(dolphie.replica_manager.available_replicas)

    table_information.add_column()
    table_information.add_column(min_width=25, max_width=35)
    table_information.add_row("[label]Version", f"{dolphie.host_distro} {dolphie.host_version}")
    if global_variables.get("version_compile_os") and global_variables.get("version_compile_machine"):
        table_information.add_row(
            "[label]", "%s (%s)" % (global_variables["version_compile_os"], global_variables["version_compile_machine"])
        )
    table_information.add_row("[label]Type", host_type)
    table_information.add_row("[label]Uptime", str(timedelta(seconds=global_status["Uptime"])))
    table_information.add_row("[label]Replicas", "%s" % replicas)
    table_information.add_row(
        "[label]Threads",
        "[label]con[/label] %s[highlight]/[/highlight][label]run[/label]"
        " %s[highlight]/[/highlight][label]cac[/label] %s"
        % (
            format_number(global_status["Threads_connected"]),
            format_number(global_status["Threads_running"]),
            format_number(global_status["Threads_cached"]),
        ),
    )
    table_information.add_row(
        "[label]Tables",
        "[label]open[/label] %s[highlight]/[/highlight][label]opened[/label] %s"
        % (
            format_number(global_status["Open_tables"]),
            format_number(global_status["Opened_tables"]),
        ),
    )
    if not dolphie.replay_file:
        runtime = str(datetime.now() - dolphie.dolphie_start_time).split(".")[0]
        table_information.add_row(
            "[label]Runtime", f"{runtime} [label]Latency[/label] {round(dolphie.worker_processing_time, 2)}s"
        )
    else:
        if dolphie.worker_processing_time:
            table_information.add_row("[label]Latency", f"{round(dolphie.worker_processing_time, 2)}s")

    tab.dashboard_section_1.update(table_information)

    ######################
    # System Utilization #
    ######################
    table = create_system_utilization_table(tab)

    # Fix: reuse the table built above instead of building it a second time
    if table:
        tab.dashboard_section_6.update(table)

    ###########
    # InnoDB  #
    ###########
    table_innodb = Table(show_header=False, box=None, title="InnoDB", title_style=table_title_style)

    table_innodb.add_column()
    table_innodb.add_column(width=9)

    # Calculate InnoDB memory read hit efficiency
    ib_pool_disk_reads = global_status.get("Innodb_buffer_pool_reads", 0)
    ib_pool_mem_reads = global_status.get(
        "Innodb_buffer_pool_read_requests", 1
    )  # Default to 1 to avoid division by zero

    if ib_pool_disk_reads >= ib_pool_mem_reads:
        innodb_efficiency = "[red]0.00%"
    else:
        efficiency = 100 - (ib_pool_disk_reads / ib_pool_mem_reads * 100)

        if efficiency > 90:
            color_code = "green"
        elif efficiency > 80:
            color_code = "yellow"
        else:
            color_code = "red"

        innodb_efficiency = f"[{color_code}]{efficiency:.2f}%"

    # Add data to our table
    table_innodb.add_row("[label]Read Hit", innodb_efficiency)
    table_innodb.add_row("[label]Chkpt Age", dolphie.metric_manager.get_metric_checkpoint_age(format=True))
    table_innodb.add_row("[label]AHI Hit", dolphie.metric_manager.get_metric_adaptive_hash_index())

    bp_instances = global_variables.get("innodb_buffer_pool_instances", 1)
    plural = "s" if bp_instances > 1 else ""
    table_innodb.add_row(f"[label]BP Instance{plural}", format_number(bp_instances))

    table_innodb.add_row("[label]BP Size", format_bytes(global_variables["innodb_buffer_pool_size"]))
    table_innodb.add_row(
        "[label]BP Available",
        format_bytes(
            dolphie.global_variables["innodb_buffer_pool_size"] - dolphie.global_status["Innodb_buffer_pool_bytes_data"]
        ),
    )
    table_innodb.add_row("[label]BP Dirty", format_bytes(global_status["Innodb_buffer_pool_bytes_dirty"]))
    table_innodb.add_row(
        "[label]History List", format_number(dolphie.innodb_metrics.get("trx_rseg_history_len", "N/A"))
    )

    tab.dashboard_section_2.update(table_innodb)

    ##############
    # Binary Log #
    ##############
    table_primary = Table(show_header=False, box=None, title="Binary Log", title_style=table_title_style)

    if global_variables.get("log_bin") == "OFF" or not binlog_status.get("File"):
        tab.dashboard_section_3.display = False
    else:
        tab.dashboard_section_3.display = True
        table_primary.add_column()
        table_primary.add_column(max_width=40)

        # Binlog cache hit rate: percentage of transactions whose cache did
        # not spill to disk
        binlog_cache = 100
        binlog_cache_disk = global_status["Binlog_cache_disk_use"]
        binlog_cache_mem = global_status["Binlog_cache_use"]
        if binlog_cache_disk and binlog_cache_mem:
            if binlog_cache_disk >= binlog_cache_mem:
                # Fix: previously assigned innodb_efficiency here (copy-paste
                # bug), leaving the displayed hit rate at 100%
                binlog_cache = 0
            else:
                # Fix: the ratio must be scaled by 100 before subtracting
                binlog_cache = round(100 - (binlog_cache_disk / binlog_cache_mem * 100), 2)

        table_primary.add_row("[label]File name", binlog_status["File"])
        table_primary.add_row(
            "[label]Position",
            "%s" % (str(binlog_status["Position"])),
        )
        table_primary.add_row(
            "[label]Size",
            "%s" % format_bytes(binlog_status["Position"]),
        )
        table_primary.add_row("[label]Diff", format_bytes(binlog_status["Diff_Position"]))
        table_primary.add_row("[label]Cache Hit", f"{binlog_cache}%")

        binlog_format = global_variables.get("binlog_format", "N/A")
        binlog_row_image = None
        if binlog_format == "ROW":
            binlog_row_image = global_variables.get("binlog_row_image", "N/A")
            table_primary.add_row("[label]Format", "{} ({})".format(binlog_format, binlog_row_image))
        else:
            # Fix: dropped the stray third cell (binlog_row_image is always
            # None in this branch and the table only has two columns)
            table_primary.add_row("[label]Format", binlog_format)

        if dolphie.connection_source_alt == ConnectionSource.mariadb:
            table_primary.add_row("[label]Encrypt", global_variables.get("encrypt_binlog", "N/A"))
        else:
            table_primary.add_row("[label]GTID", global_variables.get("gtid_mode", "N/A"))
            table_primary.add_row("[label]Compression", global_variables.get("binlog_transaction_compression", "N/A"))

        tab.dashboard_section_3.update(table_primary)

    ###############
    # Replication #
    ###############
    # Only show the compact replication summary when the full panel is hidden
    if dolphie.replication_status and not dolphie.panels.replication.visible:
        tab.dashboard_section_5.display = True
        tab.dashboard_section_5.update(ReplicationPanel.create_replication_table(tab, dashboard_table=True))
    else:
        tab.dashboard_section_5.display = False
    ###############
    # Statistics  #
    ###############
    table_stats = Table(show_header=False, box=None, title="Statistics/s", title_style=table_title_style)

    table_stats.add_column()
    table_stats.add_column(min_width=6)

    # Add DML statistics
    metrics = dolphie.metric_manager.metrics.dml
    metric_labels = {
        "Queries": "Queries",
        "SELECT": "Com_select",
        "INSERT": "Com_insert",
        "UPDATE": "Com_update",
        "DELETE": "Com_delete",
        "REPLACE": "Com_replace",
        "COMMIT": "Com_commit",
        "ROLLBACK": "Com_rollback",
    }

    for label, metric_name in metric_labels.items():
        metric_data: MetricData = getattr(metrics, metric_name)

        if metric_data.values:
            table_stats.add_row(f"[label]{label}", format_number(metric_data.values[-1]))
        else:
            table_stats.add_row(f"[label]{label}", "0")

    tab.dashboard_section_4.update(table_stats)
237 |
238 |
def create_system_utilization_table(tab: Tab) -> Table:
    """
    Build the System Utilization dashboard table (uptime, CPU, load, memory,
    swap, disk IOPS). Returns None when no system utilization data exists.
    """
    dolphie = tab.dolphie

    if not dolphie.system_utilization:
        return None

    table = Table(
        show_header=False, box=None, title="System Utilization", title_style=Style(color="#bbc8e8", bold=True)
    )
    table.add_column()
    table.add_column(min_width=18, max_width=25)

    def format_percent(value, thresholds=(80, 90), colors=("green", "yellow", "red")):
        # Bucket the percentage: <= first threshold, between, or above second
        if value > thresholds[1]:
            color = colors[2]
        elif value > thresholds[0]:
            color = colors[1]
        else:
            color = colors[0]
        return f"[{color}]{value}%[/{color}]"

    # Uptime
    uptime = dolphie.system_utilization.get("Uptime", "N/A")
    table.add_row("[label]Uptime", str(timedelta(seconds=uptime)) if uptime != "N/A" else "N/A")

    # CPU
    cpu_percent_values = dolphie.metric_manager.metrics.system_cpu.CPU_Percent.values
    if not cpu_percent_values:
        table.add_row("[label]CPU", "N/A")
    else:
        cpu_percent = round(cpu_percent_values[-1], 2)
        cpu_cores = dolphie.system_utilization.get("CPU_Count", "N/A")
        table.add_row("[label]CPU", f"{format_percent(cpu_percent)} [label]cores[/label] {cpu_cores}")

    # CPU load averages (1/5/15 min), shown only when present
    load_averages = dolphie.system_utilization.get("CPU_Load_Avg")
    if load_averages:
        table.add_row("[label]Load", " ".join(f"{avg:.2f}" for avg in load_averages))

    # Memory
    memory_used = dolphie.metric_manager.metrics.system_memory.Memory_Used.last_value
    memory_total = dolphie.metric_manager.metrics.system_memory.Memory_Total.last_value
    if not (memory_used and memory_total):
        table.add_row("[label]Memory", "N/A\n")
    else:
        memory_percent_used = round((memory_used / memory_total) * 100, 2)
        table.add_row(
            "[label]Memory",
            (
                f"{format_percent(memory_percent_used)}\n{format_bytes(memory_used)}"
                f"[dark_gray]/[/dark_gray]{format_bytes(memory_total)}"
            ),
        )

    # Swap
    swap_used = format_bytes(dolphie.system_utilization.get("Swap_Used", "N/A"))
    swap_total = format_bytes(dolphie.system_utilization.get("Swap_Total", "N/A"))
    table.add_row("[label]Swap", f"{swap_used}[dark_gray]/[/dark_gray]{swap_total}")

    # Disk I/O
    disk_read_values = dolphie.metric_manager.metrics.system_disk_io.Disk_Read.values
    disk_write_values = dolphie.metric_manager.metrics.system_disk_io.Disk_Write.values
    if disk_read_values and disk_write_values:
        last_disk_read = format_number(disk_read_values[-1])
        last_disk_write = format_number(disk_write_values[-1])
        table.add_row("[label]Disk", f"[label]IOPS R[/label] {last_disk_read}\n[label]IOPS W[/label] {last_disk_write}")
    else:
        table.add_row("[label]Disk", "[label]IOPS R[/label] N/A\n[label]IOPS W[/label] N/A")

    return table
310 |
--------------------------------------------------------------------------------
/dolphie/Panels/MetadataLocks.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List, Union
2 |
3 | from rich.syntax import Syntax
4 | from textual.widgets import DataTable
5 |
6 | from dolphie.Modules.Functions import format_query, format_time
7 | from dolphie.Modules.Queries import MySQLQueries
8 | from dolphie.Modules.TabManager import Tab
9 |
10 |
def create_panel(tab: Tab) -> None:
    """Render the Metadata Locks panel into the tab's DataTable.

    Performs an incremental refresh against ``dolphie.metadata_locks``:
    existing rows are updated cell-by-cell only when a value changed, new
    rows are appended, and rows whose lock id is no longer present are
    removed. The table is then sorted by lock age and the panel title is
    updated with the current row count.
    """
    dolphie = tab.dolphie

    # Column key (from the query result) -> display name and fixed width
    # (width=None lets the column auto-size)
    columns = {
        "OBJECT_TYPE": {"name": "Object Type", "width": 13},
        "OBJECT_SCHEMA": {"name": "Object Schema", "width": 13},
        "OBJECT_NAME": {"name": "Object Name", "width": 25},
        "LOCK_TYPE": {"name": "Lock Type", "width": 20},
        "LOCK_STATUS": {"name": "Lock Status", "width": 11},
        "CODE_SOURCE": {"name": "Code Source", "width": 15},
        "THREAD_SOURCE": {"name": "Thread Source", "width": 15},
        "PROCESSLIST_ID": {"name": "Process ID", "width": 13},
        "PROCESSLIST_USER": {"name": "User", "width": 20},
        "PROCESSLIST_TIME": {"name": "Age", "width": 8},
        "PROCESSLIST_INFO": {"name": "Query", "width": None},
    }

    # Refresh optimization: only render the first N characters of each query
    query_length_max = 300
    metadata_locks_datatable = tab.metadata_locks_datatable

    # Create the columns once, on the first render
    if not metadata_locks_datatable.columns:
        for column_key, column_data in columns.items():
            column_name = column_data["name"]
            metadata_locks_datatable.add_column(column_name, key=column_name, width=column_data["width"])

    for lock in dolphie.metadata_locks:
        lock_id = str(lock["id"])
        row_values = []
        row_height = 1

        for column_id, (column_key, column_data) in enumerate(columns.items()):
            column_name = column_data["name"]
            column_value = lock[column_key]

            # Get height of row based on the how many objects are in the OBJECT_NAME field
            if (
                column_key == "OBJECT_NAME"
                and column_value
                and len(column_value) > column_data["width"]
                and "," in column_value
            ):
                # Truncate the object names to the width of the column
                object_names = [object_name[: column_data["width"]] for object_name in column_value.split(",")]
                thread_value = "\n".join(object_names)

                # One display line per object name
                row_height = len(object_names)
            else:
                thread_value = format_value(lock, column_key, column_value)

            if lock_id in metadata_locks_datatable.rows:
                # Row already exists: compare against the currently rendered value
                datatable_value = metadata_locks_datatable.get_row(lock_id)[column_id]

                temp_thread_value = thread_value
                temp_datatable_value = datatable_value

                # If the column is the query, we need to compare the code of the Syntax object
                if column_key == "PROCESSLIST_INFO":
                    if isinstance(thread_value, Syntax):
                        temp_thread_value = thread_value.code[:query_length_max]
                        thread_value = format_query(temp_thread_value)
                    if isinstance(datatable_value, Syntax):
                        temp_datatable_value = datatable_value.code

                # Update the datatable if values differ
                if temp_thread_value != temp_datatable_value:
                    metadata_locks_datatable.update_cell(lock_id, column_name, thread_value)
            else:
                # Only show the first {query_length_max} characters of the query
                if column_name == "Query" and isinstance(thread_value, Syntax):
                    thread_value = format_query(thread_value.code[:query_length_max])

                # Create an array of values to append to the datatable
                row_values.append(thread_value)

        # Add a new row to the datatable
        if row_values:
            metadata_locks_datatable.add_row(*row_values, key=lock_id, height=row_height)

    # Find the ids that exist in datatable but not in metadata_locks
    if dolphie.metadata_locks:
        rows_to_remove = set(metadata_locks_datatable.rows.keys()) - {
            str(lock["id"]) for lock in dolphie.metadata_locks
        }
        for id in rows_to_remove:
            metadata_locks_datatable.remove_row(id)
    else:
        # No locks at all: wipe any stale rows
        if metadata_locks_datatable.row_count:
            metadata_locks_datatable.clear()

    # Sort by lock age; direction follows the user's toggle
    metadata_locks_datatable.sort("Age", reverse=dolphie.sort_by_time_descending)

    tab.metadata_locks_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.metadata_locks.name)} "
        f"([$highlight]{metadata_locks_datatable.row_count}[/$highlight])"
    )
107 |
108 |
def fetch_data(tab: Tab) -> List[Dict[str, Union[int, str]]]:
    """Query performance_schema for metadata locks, honoring the tab's active filters.

    Builds a dynamic WHERE clause from the user/db/host/time/query filters and
    splices it into the ``$1`` placeholder of the metadata_locks query.

    NOTE(review): filter values are interpolated directly into the SQL string
    (as in the original implementation); they originate from user input in the
    UI, so parameterized queries would be safer if the connection layer
    supports them.
    """
    dolphie = tab.dolphie

    # Collect one SQL condition per active filter
    conditions = []

    # Filter user
    if dolphie.user_filter:
        conditions.append(f"processlist_user = '{dolphie.user_filter}'")

    # Filter database
    if dolphie.db_filter:
        conditions.append(f"processlist_db = '{dolphie.db_filter}'")

    # Filter hostname/IP -- LIKE with a trailing wildcard since a port is appended
    if dolphie.host_filter:
        conditions.append(f"processlist_host LIKE '{dolphie.host_filter}%'")

    # Filter time
    if dolphie.query_time_filter:
        conditions.append(f"processlist_time >= '{dolphie.query_time_filter}'")

    # Filter query text
    if dolphie.query_filter:
        conditions.append(f"(processlist_info LIKE '%{dolphie.query_filter}%')")

    # Splice the dynamic WHERE clause into the base query's placeholder
    filter_sql = "AND " + " AND ".join(conditions) if conditions else ""
    query = MySQLQueries.metadata_locks.replace("$1", filter_sql)

    dolphie.main_db_connection.execute(query)
    return dolphie.main_db_connection.fetchall()
148 |
149 |
def format_value(lock: dict, column_key: str, value: str) -> str:
    """Format a single metadata-lock column value for display.

    Handles schema/table splitting of "schema/table"-style OBJECT_NAMEs,
    colorizes lock status/type, formats times and queries, and trims
    code/thread source paths. Empty values render as a dark-gray "N/A".
    """
    result = value

    # OBJECT_NAME is in the format "schema/table" sometimes where OBJECT_SCHEMA is empty,
    # so split OBJECT_NAME and correct it if necessary
    if (
        column_key == "OBJECT_SCHEMA"
        and not value
        and lock["OBJECT_NAME"]
        and "/" in lock["OBJECT_NAME"]
    ):
        result = lock["OBJECT_NAME"].split("/", 1)[0]
    elif column_key == "OBJECT_NAME" and value and "/" in value:
        result = value.split("/")[1]
    elif value is None or value == "":
        result = "[dark_gray]N/A"
    elif column_key == "PROCESSLIST_INFO":
        result = format_query(value)
    elif column_key == "LOCK_STATUS":
        # Green when granted, red when waiting
        status_color = {"GRANTED": "green", "PENDING": "red"}.get(value)
        if status_color:
            result = f"[{status_color}]{value}[/{status_color}]"
    elif column_key == "LOCK_TYPE":
        if value == "EXCLUSIVE":
            result = f"[yellow]{value}[/yellow]"
    elif column_key == "PROCESSLIST_TIME":
        result = format_time(value)
    elif column_key == "CODE_SOURCE":
        # Drop the trailing ":line" suffix
        result = value.split(":", 1)[0]
    elif column_key == "THREAD_SOURCE":
        # Keep only the last path component
        result = value.rsplit("/", 1)[-1]

    # Rename the internal thread name to something user-friendly
    if result == "one_connection":
        result = "user_connection"

    return result
182 |
--------------------------------------------------------------------------------
/dolphie/Panels/PerformanceSchemaMetrics.py:
--------------------------------------------------------------------------------
1 | import os
2 | from datetime import datetime
3 |
4 | from textual.widgets import DataTable
5 |
6 | from dolphie.Modules.Functions import format_bytes, format_number, format_time
7 | from dolphie.Modules.TabManager import Tab
8 |
9 |
def create_panel(tab: Tab):
    """Refresh both Performance Schema metric tables and the reset-delta label."""
    dolphie = tab.dolphie

    # Redraw the two datatables that make up this panel
    update_file_io_by_instance(tab)
    update_table_io_waits_summary_by_table(tab)

    # Work out how long ago the PFS metrics were last reset
    if dolphie.replay_file:
        elapsed = dolphie.global_status.get("replay_pfs_metrics_last_reset_time", 0)
    elif dolphie.pfs_metrics_last_reset_time:
        elapsed = (datetime.now() - dolphie.pfs_metrics_last_reset_time).total_seconds()
    else:
        elapsed = 0

    tab.pfs_metrics_delta.label = f"Delta since last reset ([$light_blue]{format_time(elapsed)}[/$light_blue])"
25 |
26 |
def update_table_io_waits_summary_by_table(tab: Tab) -> None:
    """Refresh the "Table I/O Waits" DataTable from the collected PFS data.

    Incrementally updates the datatable: cells are only rewritten when their
    value changed, new rows are appended, and rows missing from the latest
    snapshot are removed. Values come from either the total ("t") or delta
    ("d") bucket depending on which radio button is selected. The tab label
    is updated with the row count; when no data is available the table is
    hidden instead.
    """
    dolphie = tab.dolphie
    datatable = tab.pfs_metrics_table_io_waits_datatable

    # Nothing to show: hide the table and zero out the tab label
    if not dolphie.table_io_waits_data or not dolphie.table_io_waits_data.filtered_data:
        datatable.display = False
        tab.pfs_metrics_tabs.get_tab("pfs_metrics_table_io_waits_tab").label = (
            "Table I/O Waits ([$highlight]0[/$highlight])"
        )

        return

    datatable.display = True

    # Display name -> source field(s); a list pairs a count field with its
    # wait-time field so they render together as "time (count)".
    # "wait_time_ps" is a hidden (width 0) numeric column used only for sorting.
    columns = {
        "Table": {"field": "TABLE_NAME", "width": None},
        "Total": {"field": ["COUNT_STAR", "SUM_TIMER_WAIT"], "width": 23},
        "Fetch": {"field": ["COUNT_FETCH", "SUM_TIMER_FETCH"], "width": 23},
        "Insert": {"field": ["COUNT_INSERT", "SUM_TIMER_INSERT"], "width": 23},
        "Update": {"field": ["COUNT_UPDATE", "SUM_TIMER_UPDATE"], "width": 23},
        "Delete": {"field": ["COUNT_DELETE", "SUM_TIMER_DELETE"], "width": 23},
        "wait_time_ps": {"field": "SUM_TIMER_WAIT", "width": 0},
    }

    # Add columns to the datatable if it is empty
    if not datatable.columns:
        for column_key, column_data in columns.items():
            column_width = column_data["width"]
            datatable.add_column(column_key, key=column_key, width=column_width)

    data = dolphie.table_io_waits_data.filtered_data
    for file_name, metrics in data.items():
        row_id = file_name
        row_values = []

        row_values.append(file_name)

        # Check if row already exists before processing columns
        if row_id in datatable.rows:
            row_exists = True
        else:
            row_exists = False

        for column_id, (column_name, column_data) in enumerate(columns.items()):
            # "Table" was already appended as the first value above
            if column_name == "Table":
                continue

            field = column_data["field"]
            column_value = None

            # Handle fields that may contain arrays
            if isinstance(field, list):
                # If the field is an array, it contains two fields to be combined
                count_field, wait_time_field = field

                # Get the count and wait_time values from the combined fields
                count_value = metrics.get(count_field, {})
                wait_time_value = metrics.get(wait_time_field, {})

                # Pick total ("t") or delta ("d") based on the selected radio button
                if tab.pfs_metrics_radio_set.pressed_button.id == "pfs_metrics_total":
                    count_value = count_value.get("t", 0)
                    wait_time_value = wait_time_value.get("t", 0)
                else:
                    count_value = count_value.get("d", 0)
                    wait_time_value = wait_time_value.get("d", 0)

                if count_value and wait_time_value:
                    column_value = f"{format_time(wait_time_value, picoseconds=True)} ({format_number(count_value)})"
                else:
                    column_value = "[dark_gray]N/A"
            else:
                column_value = metrics.get(field, {})
                if tab.pfs_metrics_radio_set.pressed_button.id == "pfs_metrics_total":
                    column_value = column_value.get("t", 0)
                else:
                    column_value = column_value.get("d", 0)

            # Handle row updates
            if row_exists:
                # Only rewrite the cell when the rendered value actually changed
                current_value = datatable.get_row(row_id)[column_id]
                if column_value != current_value:
                    datatable.update_cell(row_id, column_name, column_value)
            else:
                row_values.append(column_value)

        # Add the row if it's new
        if not row_exists and row_values:
            datatable.add_row(*row_values, key=row_id)

    # Clean up rows that no longer exist in the data
    if data:
        current_rows = set(data.keys())
        existing_rows = set(datatable.rows.keys())

        rows_to_remove = existing_rows - current_rows
        for row_id in rows_to_remove:
            datatable.remove_row(row_id)
    else:
        if datatable.row_count:
            datatable.clear()

    # Busiest tables first, using the hidden numeric column
    datatable.sort("wait_time_ps", reverse=True)

    # Update the title to reflect the number of active rows
    tab.pfs_metrics_tabs.get_tab("pfs_metrics_table_io_waits_tab").label = (
        f"Table I/O Waits ([$highlight]{datatable.row_count}[/$highlight])"
    )
134 |
135 |
def update_file_io_by_instance(tab: Tab) -> None:
    """Refresh the "File I/O" DataTable from the collected PFS file I/O data.

    Incrementally updates the datatable (cells rewritten only on change,
    new rows appended, stale rows removed). File names are prettified:
    table files become "schema.table", paths get a dimmed directory prefix,
    and bare instance names are highlighted in brackets. Values come from
    the total ("t") or delta ("d") bucket depending on the selected radio
    button. The tab label is updated with the row count; when no data is
    available the table is hidden instead.
    """
    dolphie = tab.dolphie
    datatable = tab.pfs_metrics_file_io_datatable

    # Nothing to show: hide the table and zero out the tab label
    if not dolphie.file_io_data or not dolphie.file_io_data.filtered_data:
        datatable.display = False
        tab.pfs_metrics_tabs.get_tab("pfs_metrics_file_io_tab").label = "File I/O ([$highlight]0[/$highlight])"

        return

    datatable.display = True

    # Display name -> source field, width, and display formatting.
    # "wait_time_ps" is a hidden (width 0) numeric column used only for sorting.
    columns = {
        "File or Table": {"field": "FILE_NAME", "width": None},
        "Wait Time": {"field": "SUM_TIMER_WAIT", "width": 10, "format": "time"},
        "Read Ops": {"field": "COUNT_READ", "width": 10, "format": "number"},
        "Write Ops": {"field": "COUNT_WRITE", "width": 10, "format": "number"},
        "Misc Ops": {"field": "COUNT_MISC", "width": 10, "format": "number"},
        "Read Bytes": {"field": "SUM_NUMBER_OF_BYTES_READ", "width": 10, "format": "bytes"},
        "Write Bytes": {"field": "SUM_NUMBER_OF_BYTES_WRITE", "width": 11, "format": "bytes"},
        "wait_time_ps": {"field": "SUM_TIMER_WAIT", "width": 0},
    }

    # Add columns to the datatable if it is empty
    if not datatable.columns:
        for column_key, column_data in columns.items():
            column_width = column_data["width"]
            datatable.add_column(column_key, key=column_key, width=column_width)

    data = dolphie.file_io_data.filtered_data
    for file_name, metrics in data.items():
        # Keep the raw file name as the stable row key; file_name itself is
        # rebound below purely for display
        row_id = file_name
        row_values = []

        table_match = dolphie.file_io_data.table_pattern.search(file_name)
        if file_name.endswith("/mysql.ibd"):
            # Dim the directory portion, keep the basename bright
            file_name = f"[dark_gray]{os.path.dirname(file_name)}[/dark_gray]/{os.path.basename(file_name)}"
        elif table_match:
            # Table data file: render as schema.table
            file_name = f"{table_match.group(1)}.{table_match.group(2)}"
        elif "/" in file_name:
            file_name = f"[dark_gray]{os.path.dirname(file_name)}[/dark_gray]/{os.path.basename(file_name)}"
        else:
            # Not a path: treat as an instance name and highlight it in brackets
            file_name = f"[b][light_blue][[/light_blue][/b][highlight]{file_name}[b][light_blue]][/light_blue][/b]"

        row_values.append(file_name)

        # Check if row already exists before processing columns
        if row_id in datatable.rows:
            row_exists = True
        else:
            row_exists = False

        for column_id, (column_name, column_data) in enumerate(columns.items()):
            # "File or Table" was already appended as the first value above
            if column_data["field"] == "FILE_NAME":
                continue

            # Pick total ("t") or delta ("d") based on the selected radio button
            column_value = metrics.get(column_data["field"], {})
            if tab.pfs_metrics_radio_set.pressed_button.id == "pfs_metrics_total":
                column_value = column_value.get("t", 0)
            else:
                column_value = column_value.get("d", 0)

            # Handle special formatting
            if column_data.get("format") == "time":
                column_value = format_time(column_value, picoseconds=True)
            elif column_value == 0 or column_value is None:
                # Keep the hidden sort column numeric; dim zeros elsewhere
                if column_name == "wait_time_ps":
                    column_value = 0
                else:
                    column_value = "[dark_gray]0"
            elif column_data.get("format") == "number":
                column_value = format_number(column_value)
            elif column_data.get("format") == "bytes":
                column_value = format_bytes(column_value)

            if row_exists:
                # Check and update only if the value has changed
                current_value = datatable.get_row(row_id)[column_id]
                if column_value != current_value:
                    datatable.update_cell(row_id, column_name, column_value)
            else:
                # Add new row values
                row_values.append(column_value)

        # Add the row if it's new
        if not row_exists and row_values:
            datatable.add_row(*row_values, key=row_id)

    # Clean up rows that no longer exist in the data
    if data:
        current_rows = set(data.keys())
        existing_rows = set(datatable.rows.keys())

        rows_to_remove = existing_rows - current_rows
        for row_id in rows_to_remove:
            datatable.remove_row(row_id)
    else:
        if datatable.row_count:
            datatable.clear()

    # Busiest files first, using the hidden numeric column
    datatable.sort("wait_time_ps", reverse=True)

    # Update the title to reflect the number of active rows
    tab.pfs_metrics_tabs.get_tab("pfs_metrics_file_io_tab").label = (
        f"File I/O ([$highlight]{datatable.row_count}[/$highlight])"
    )
242 |
--------------------------------------------------------------------------------
/dolphie/Panels/Processlist.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from rich.syntax import Syntax
4 | from textual.widgets import DataTable
5 |
6 | from dolphie.DataTypes import ProcesslistThread
7 | from dolphie.Modules.Functions import format_number, format_query
8 | from dolphie.Modules.Queries import MySQLQueries
9 | from dolphie.Modules.TabManager import Tab
10 |
11 |
def create_panel(tab: Tab) -> None:
    """Render the Processlist panel into the tab's DataTable.

    Builds the column layout dynamically from the active display options,
    then performs an incremental refresh: existing rows are updated
    cell-by-cell only when a value changed (queries are compared via the
    underlying Syntax.code), new rows are appended, and rows for threads
    that disappeared are removed. During replays, filtering is applied
    in-process here instead of in the SQL WHERE clause. Finally the table
    is sorted by time and the panel title updated with the row count.
    """
    dolphie = tab.dolphie

    # Base column layout; more columns are spliced in below depending on
    # the server capabilities and the user's display toggles
    columns = [
        {"name": "Thread ID", "field": "id", "width": None, "format_number": False},
    ]

    if dolphie.use_performance_schema_for_processlist:
        columns.extend([{"name": "Protocol", "field": "protocol", "width": 8, "format_number": False}])

    columns.extend(
        [
            {"name": "Username", "field": "user", "width": 20, "format_number": False},
        ]
    )

    if dolphie.show_additional_query_columns:
        columns.extend(
            [
                {"name": "Hostname/IP", "field": "host", "width": 25, "format_number": False},
                {"name": "Database", "field": "db", "width": 15, "format_number": False},
            ]
        )

    columns.extend(
        [
            {"name": "Command", "field": "command", "width": 8, "format_number": False},
            {"name": "State", "field": "state", "width": 20, "format_number": False},
            {"name": "TRX State", "field": "trx_state", "width": 9, "format_number": False},
            {"name": "R-Lock", "field": "trx_rows_locked", "width": 7, "format_number": True},
            {"name": "R-Mod", "field": "trx_rows_modified", "width": 7, "format_number": True},
        ]
    )

    # Tickets column only makes sense when InnoDB thread concurrency is in play
    if (
        dolphie.show_additional_query_columns and dolphie.global_variables.get("innodb_thread_concurrency")
    ) or dolphie.show_threads_with_concurrency_tickets:
        columns.append({"name": "Tickets", "field": "trx_concurrency_tickets", "width": 8, "format_number": False})

    if dolphie.show_trxs_only:
        columns.append(
            {"name": "TRX Age", "field": "trx_time", "width": 9, "format_number": False},
        )

    # "time_seconds" is a hidden (width 0) numeric column used only for sorting
    columns.extend(
        [
            {"name": "Age", "field": "formatted_time", "width": 9, "format_number": False},
            {"name": "Query", "field": "formatted_query", "width": None, "format_number": False},
            {"name": "time_seconds", "field": "time", "width": 0, "format_number": False},
        ]
    )

    # Refresh optimization: only render the first N characters of each query
    query_length_max = 300
    processlist_datatable = tab.processlist_datatable

    # Clear table if columns change (e.g. a display toggle was flipped)
    if len(processlist_datatable.columns) != len(columns):
        processlist_datatable.clear(columns=True)

    # Add columns to the datatable if it is empty
    if not processlist_datatable.columns:
        for column_data in columns:
            column_name = column_data["name"]
            column_width = column_data["width"]
            processlist_datatable.add_column(column_name, key=column_name, width=column_width)

    # Thread ids filtered out in replay mode; purged from the dict afterwards
    filter_threads = []
    # Iterate through processlist_threads
    for thread_id, thread in dolphie.processlist_threads.items():
        row_values = []

        thread: ProcesslistThread
        # We use filter here for replays since the original way requires changing WHERE clause
        if dolphie.replay_file:
            found = False
            if dolphie.show_trxs_only and thread.trx_state == "[dark_gray]N/A":
                found = True
            elif dolphie.user_filter and dolphie.user_filter != thread.user:
                found = True
            elif dolphie.db_filter and dolphie.db_filter != thread.db:
                found = True
            elif dolphie.host_filter and dolphie.host_filter not in thread.host:
                found = True
            elif dolphie.query_time_filter and dolphie.query_time_filter >= thread.time:
                found = True
            elif dolphie.query_filter and dolphie.query_filter not in thread.formatted_query.code:
                found = True
            elif dolphie.show_threads_with_concurrency_tickets and thread.trx_concurrency_tickets == "[dark_gray]0":
                found = True

            if found:
                filter_threads.append(thread_id)
                continue

        for column_id, (column_data) in enumerate(columns):
            column_name = column_data["name"]
            column_field = column_data["field"]
            column_format_number = column_data["format_number"]
            column_value = getattr(thread, column_field)

            thread_value = format_number(column_value) if column_format_number else column_value
            if thread_id in processlist_datatable.rows:
                datatable_value = processlist_datatable.get_row(thread_id)[column_id]

                # Initialize temp values for possible Syntax object comparison below
                temp_thread_value = thread_value
                temp_datatable_value = datatable_value

                # If the column is the query, we need to compare the code of the Syntax object
                update_width = False
                if column_field == "formatted_query":
                    update_width = True
                    if isinstance(thread_value, Syntax):
                        temp_thread_value = thread_value.code[:query_length_max]
                        thread_value = format_query(temp_thread_value)
                    if isinstance(datatable_value, Syntax):
                        temp_datatable_value = datatable_value.code

                # Update the datatable if values differ; time columns are always
                # refreshed since they change every poll
                if (
                    temp_thread_value != temp_datatable_value
                    or column_field == "formatted_time"
                    or column_field == "time"
                ):
                    processlist_datatable.update_cell(thread_id, column_name, thread_value, update_width=update_width)
            else:
                # Only show the first {query_length_max} characters of the query
                if column_field == "formatted_query" and isinstance(thread_value, Syntax):
                    thread_value = format_query(thread_value.code[:query_length_max])

                # Create an array of values to append to the datatable
                row_values.append(thread_value)

        # Add a new row to the datatable
        if row_values:
            processlist_datatable.add_row(*row_values, key=thread_id)

    # Remove threads that were filtered out
    for thread_id in filter_threads:
        dolphie.processlist_threads.pop(thread_id)

    # Remove rows from processlist_datatable that no longer exist in processlist_threads
    if dolphie.processlist_threads:
        rows_to_remove = set(processlist_datatable.rows.keys()) - set(dolphie.processlist_threads.keys())
        for id in rows_to_remove:
            processlist_datatable.remove_row(id)
    else:
        if processlist_datatable.row_count:
            processlist_datatable.clear()

    # Sort by the hidden numeric time column; direction follows the user's toggle
    processlist_datatable.sort("time_seconds", reverse=dolphie.sort_by_time_descending)

    title = (
        f"{dolphie.panels.get_panel_title(dolphie.panels.processlist.name)} "
        f"([$highlight]{processlist_datatable.row_count}[/$highlight]"
    )
    if dolphie.show_threads_with_concurrency_tickets:
        # Show "rows/concurrency limit" when filtering by concurrency tickets
        title += f"/[$highlight]{dolphie.global_variables.get('innodb_thread_concurrency')}[/$highlight]"
    title += ")"
    tab.processlist_title.update(title)
173 |
174 |
def fetch_data(tab: Tab) -> Dict[str, ProcesslistThread]:
    """Fetch the current processlist, keyed by thread id (as a string).

    Chooses between the Performance Schema query and the classic
    processlist query based on server capabilities, then builds a dynamic
    WHERE clause from the active filters and splices it into the query's
    ``$1`` placeholder. Dolphie's own connections are excluded from the
    result.

    NOTE(review): filter values are interpolated directly into the SQL
    string; they originate from user input in the UI, so parameterized
    queries would be safer if the connection layer supports them.
    """
    dolphie = tab.dolphie

    if dolphie.performance_schema_enabled and dolphie.use_performance_schema_for_processlist:
        processlist_query = MySQLQueries.ps_query
        if not dolphie.is_mysql_version_at_least("5.7"):
            # Remove the connection_type field for MySQL versions below 5.7 since it doesn't exist
            processlist_query = processlist_query.replace("connection_type", '""')
    else:
        processlist_query = MySQLQueries.pl_query

    ########################
    # WHERE clause filters #
    ########################
    where_clause = []

    # Filter out idle threads if specified
    if not dolphie.show_idle_threads:
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append(
                "(processlist_command != 'Sleep' AND processlist_command NOT LIKE 'Binlog Dump%') AND (processlist_info"
                " IS NOT NULL OR trx_query IS NOT NULL) AND IFNULL(processlist_state, '') NOT LIKE 'Group Replication"
                " Module%'"
            )
        else:
            where_clause.append(
                "(Command != 'Sleep' AND Command NOT LIKE 'Binlog Dump%') AND (Info IS NOT NULL OR trx_query IS NOT"
                " NULL) AND IFNULL(State, '') NOT LIKE 'Group Replication Module%'"
            )

    # Only show running transactions only
    if dolphie.show_trxs_only:
        where_clause.append("trx_state != ''")

    # Only show threads holding InnoDB concurrency tickets
    if dolphie.show_threads_with_concurrency_tickets:
        where_clause.append("trx_concurrency_tickets > 0")

    # Filter user
    if dolphie.user_filter:
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append("processlist_user = '%s'" % dolphie.user_filter)
        else:
            where_clause.append("User = '%s'" % dolphie.user_filter)

    # Filter database
    if dolphie.db_filter:
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append("processlist_db = '%s'" % dolphie.db_filter)
        else:
            where_clause.append("db = '%s'" % dolphie.db_filter)

    # Filter hostname/IP
    if dolphie.host_filter:
        # Have to use LIKE since there's a port at the end
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append("processlist_host LIKE '%s%%'" % dolphie.host_filter)
        else:
            where_clause.append("Host LIKE '%s%%'" % dolphie.host_filter)

    # Filter time
    if dolphie.query_time_filter:
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append("processlist_time >= '%s'" % dolphie.query_time_filter)
        else:
            where_clause.append("Time >= '%s'" % dolphie.query_time_filter)

    # Filter query
    if dolphie.query_filter:
        if dolphie.use_performance_schema_for_processlist:
            where_clause.append(
                "(processlist_info LIKE '%%%s%%' OR trx_query LIKE '%%%s%%')"
                % (dolphie.query_filter, dolphie.query_filter),
            )
        else:
            where_clause.append("Info LIKE '%%%s%%'" % dolphie.query_filter)

    # Add in our dynamic WHERE clause for filtering
    if where_clause:
        processlist_query = processlist_query.replace("$1", "AND " + " AND ".join(where_clause))
    else:
        processlist_query = processlist_query.replace("$1", "")

    processlist_threads = {}
    # Run the processlist query
    dolphie.main_db_connection.execute(processlist_query)
    threads = dolphie.main_db_connection.fetchall()

    for thread in threads:
        # Don't include Dolphie's threads
        if (
            dolphie.main_db_connection.connection_id == thread["id"]
            or dolphie.secondary_db_connection.connection_id == thread["id"]
        ):
            continue

        # Use trx_query over Performance Schema query since it's more accurate
        if dolphie.use_performance_schema_for_processlist and thread["trx_query"]:
            thread["query"] = thread["trx_query"]
        thread["query"] = "" if thread["query"] is None else thread["query"]

        if thread["host"]:
            # Strip the ":port" suffix and resolve to a hostname if possible
            host = thread["host"].split(":")[0]
            thread["host"] = dolphie.get_hostname(host)

        # Remove trx_query from the thread data since it's not needed
        thread.pop("trx_query", None)

        processlist_threads[str(thread["id"])] = ProcesslistThread(thread)

    return processlist_threads
285 |
--------------------------------------------------------------------------------
/dolphie/Panels/ProxySQLCommandStats.py:
--------------------------------------------------------------------------------
1 | from textual.widgets import DataTable
2 |
3 | from dolphie.Modules.Functions import format_number
4 | from dolphie.Modules.TabManager import Tab
5 |
6 |
def create_panel(tab: Tab) -> None:
    """Render the ProxySQL command statistics panel into the tab's DataTable.

    For every latency-bucket column (keys containing "cnt_") the raw
    cumulative counter is converted to a per-second rate using the value
    cached from the previous poll in ``dolphie.proxysql_per_second_data``
    and the polling latency. The datatable is refreshed incrementally:
    cells rewritten only on change, new rows appended, stale rows removed.
    The panel title is updated with the row count.
    """
    dolphie = tab.dolphie

    # Column key (from the stats query) -> display name, width, and formatting.
    # "width": None lets the column auto-size.
    columns = {
        "Command": {"name": "Command", "width": None, "format": None},
        "Total_cnt": {"name": "Total", "width": 10, "format": "number"},
        "Total_cnt_s": {"name": "Total/s", "width": 10, "format": "number"},
        "cnt_100us": {"name": "100μs/s", "width": 8, "format": "number"},
        "cnt_500us": {"name": "500μs/s", "width": 8, "format": "number"},
        "cnt_1ms": {"name": "1ms/s", "width": 8, "format": "number"},
        "cnt_5ms": {"name": "5ms/s", "width": 8, "format": "number"},
        "cnt_10ms": {"name": "10ms/s", "width": 8, "format": "number"},
        "cnt_50ms": {"name": "50ms/s", "width": 8, "format": "number"},
        "cnt_100ms": {"name": "100ms/s", "width": 8, "format": "number"},
        "cnt_500ms": {"name": "500ms/s", "width": 8, "format": "number"},
        "cnt_1s": {"name": "1s/s", "width": 8, "format": "number"},
        "cnt_5s": {"name": "5s/s", "width": 8, "format": "number"},
        "cnt_10s": {"name": "10s/s", "width": 8, "format": "number"},
        "cnt_INFs": {"name": "10s+/s", "width": 8, "format": "number"},
    }

    command_stats = tab.proxysql_command_stats_datatable

    # Add columns to the datatable if it is empty
    if not command_stats.columns:
        for column_key, column_data in columns.items():
            column_name = column_data["name"]
            column_width = column_data["width"]
            command_stats.add_column(column_name, key=column_key, width=column_width)

    for row in dolphie.proxysql_command_stats:
        # The command name is the stable row key
        row_id = row["Command"]
        row_values = []

        for column_id, (column_key, column_data) in enumerate(columns.items()):
            column_name = column_data["name"]
            column_format = column_data["format"]
            column_value = row.get(column_key)

            # Calculate the values per second for the following columns
            if "cnt_" in column_key:
                if not dolphie.proxysql_per_second_data.get(row_id, {}).get(column_key, 0):
                    # No previous sample cached yet; show 0 this cycle
                    column_value = 0
                else:
                    # Rate = (current counter - previous counter) / poll interval
                    value_diff = int(column_value) - dolphie.proxysql_per_second_data.get(row_id, {}).get(column_key, 0)
                    column_value = round(value_diff / dolphie.polling_latency)

                # Cache the raw counter for the next poll's delta
                dolphie.proxysql_per_second_data.setdefault(row_id, {})[column_key] = int(row.get(column_key, 0))

            if column_format == "number":
                column_value = format_number(column_value)

            # Dim zero values so active commands stand out
            if column_value == "0":
                column_value = "[dark_gray]0"

            if row_id in command_stats.rows:
                datatable_value = command_stats.get_row(row_id)[column_id]

                # Update the datatable if values differ
                if column_value != datatable_value:
                    command_stats.update_cell(row_id, column_key, column_value)
            else:
                # Create an array of values to append to the datatable
                row_values.append(column_value)

        # Add a new row to the datatable
        if row_values:
            command_stats.add_row(*row_values, key=row_id)

    # Remove rows from datatable that no longer exist in the data
    if dolphie.proxysql_command_stats:
        current_rows = {row["Command"] for row in dolphie.proxysql_command_stats}
        existing_rows = set(command_stats.rows.keys())

        rows_to_remove = existing_rows - current_rows
        for row_id in rows_to_remove:
            command_stats.remove_row(row_id)
    else:
        if command_stats.row_count:
            command_stats.clear()

    tab.proxysql_command_stats_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.proxysql_command_stats.name)} "
        f"([$highlight]{command_stats.row_count}[/$highlight])"
    )
92 |
--------------------------------------------------------------------------------
/dolphie/Panels/ProxySQLDashboard.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 | from rich.style import Style
4 | from rich.table import Table
5 |
6 | from dolphie.Modules.Functions import format_bytes, format_number
7 | from dolphie.Modules.MetricManager import MetricData
8 | from dolphie.Modules.TabManager import Tab
9 | from dolphie.Panels.Dashboard import create_system_utilization_table
10 |
11 |
def create_panel(tab: Tab) -> Table:
    """Render the ProxySQL dashboard panel.

    Builds the host information, system utilization, connection,
    query data rate, and per-second DML statistics tables and pushes
    each one into its corresponding dashboard section widget on the tab.
    """
    dolphie = tab.dolphie

    global_status = dolphie.global_status

    ####################
    # Host Information #
    ####################
    # Strip the microseconds portion for a clean H:MM:SS runtime display
    runtime = str(datetime.now() - dolphie.dolphie_start_time).split(".")[0]

    table_title_style = Style(color="#bbc8e8", bold=True)
    table = Table(
        show_header=False,
        box=None,
        title=f"{dolphie.panels.get_key(dolphie.panels.dashboard.name)}Host Information",
        title_style=table_title_style,
    )

    table.add_column()
    table.add_column(min_width=15)
    table.add_row("[label]Version", f"{dolphie.host_distro} {dolphie.host_version}")
    table.add_row("[label]Uptime", str(timedelta(seconds=global_status["ProxySQL_Uptime"])))
    table.add_row(
        "[label]MySQL",
        (
            f"{dolphie.global_variables['mysql-server_version']} "
            f"[label]Workers[/label] {global_status['MySQL_Thread_Workers']}"
        ),
    )
    if not dolphie.replay_file:
        table.add_row("[label]Runtime", runtime)

    if dolphie.worker_processing_time:
        table.add_row("[label]Latency", f"{round(dolphie.worker_processing_time, 2)}s")

    tab.dashboard_section_1.update(table)

    ######################
    # System Utilization #
    ######################
    table = create_system_utilization_table(tab)

    if table:
        # Reuse the table built above instead of constructing it a second time
        tab.dashboard_section_6.update(table)

    ##########################
    # Connection Information #
    ##########################
    proxysql_connections = dolphie.metric_manager.metrics.proxysql_connections

    table = Table(show_header=False, box=None, title="Connections", title_style=table_title_style)

    table.add_column()
    table.add_column(min_width=6)
    data_dict = {
        "[label]FE Connected": proxysql_connections.Client_Connections_connected.values,
        "[label]FE Non-idle": proxysql_connections.Client_Connections_non_idle.values,
        "[label]BE Connected": proxysql_connections.Server_Connections_connected.values,
        "[label]FE Created": proxysql_connections.Client_Connections_created.values,
        "[label]BE Created": proxysql_connections.Server_Connections_created.values,
    }

    # Frontend connection usage as a percentage of mysql-max_connections
    fe_usage = round(
        (dolphie.global_status["Client_Connections_connected"] / dolphie.global_variables["mysql-max_connections"])
        * 100,
        2,
    )

    metric_data = dolphie.metric_manager.metrics.proxysql_multiplex_efficiency.proxysql_multiplex_efficiency_ratio
    if metric_data.values:
        # Multiplex efficiency thresholds: >= 85% green, >= 50% yellow, else red
        if metric_data.values[-1] >= 85:
            color_code = "green"
        elif metric_data.values[-1] >= 50:
            color_code = "yellow"
        else:
            color_code = "red"

        mp_efficiency = f"[{color_code}]{metric_data.values[-1]}%[/{color_code}]"
    else:
        mp_efficiency = "N/A"

    # Frontend usage thresholds: >= 90% red, >= 70% yellow, else green
    if fe_usage >= 90:
        color_code = "red"
    elif fe_usage >= 70:
        color_code = "yellow"
    else:
        color_code = "green"

    table.add_row("[label]MP Efficiency", mp_efficiency)
    table.add_row("[label]FE Usage", f"[{color_code}]{fe_usage}%")
    table.add_row("[label]Active TRX", f"{global_status['Active_Transactions']}")
    for label, values in data_dict.items():
        if values:
            value = format_number(values[-1])
        else:
            value = 0

        # Rate-style counters get a "/s" suffix; gauges are shown as-is
        if "Created" in label or "Aborted" in label or "Wrong Passwd" in label:
            table.add_row(label, f"{value}/s")
        else:
            table.add_row(label, f"{value}")

    tab.dashboard_section_2.update(table)

    ####################################
    # Query Sent/Recv Rate Information #
    ####################################
    proxysql_queries_network_data = dolphie.metric_manager.metrics.proxysql_queries_data_network

    table = Table(show_header=False, box=None, title="Query Data Rates/s", title_style=table_title_style)

    table.add_column()
    table.add_column(min_width=9)
    data_dict = {
        "[label]FE Sent": proxysql_queries_network_data.Queries_frontends_bytes_sent.values,
        "[label]BE Sent": proxysql_queries_network_data.Queries_backends_bytes_sent.values,
        "[label]FE Recv": proxysql_queries_network_data.Queries_frontends_bytes_recv.values,
        "[label]BE Recv": proxysql_queries_network_data.Queries_backends_bytes_recv.values,
    }

    for label, values in data_dict.items():
        if values:
            value = format_bytes(values[-1])
        else:
            value = 0

        # No "/s" suffix per row: the table title already carries "/s"
        # (the label check that existed here could never match these labels)
        table.add_row(label, f"{value}")

    tab.dashboard_section_3.update(table)

    ###############
    # Statistics  #
    ###############
    table = Table(show_header=False, box=None, title="Statistics/s", title_style=table_title_style)

    table.add_column()
    table.add_column(min_width=7)

    # Add DML statistics (per-second metrics from the metric manager)
    metrics = dolphie.metric_manager.metrics.dml
    metric_labels = {
        "Queries": "Queries",
        "SELECT": "Com_select",
        "INSERT": "Com_insert",
        "UPDATE": "Com_update",
        "DELETE": "Com_delete",
        "REPLACE": "Com_replace",
        "COMMIT": "Com_commit",
        "ROLLBACK": "Com_rollback",
    }

    for label, metric_name in metric_labels.items():
        metric_data: MetricData = getattr(metrics, metric_name)

        if metric_data.values:
            table.add_row(f"[label]{label}", format_number(metric_data.values[-1]))
        else:
            table.add_row(f"[label]{label}", "0")

    tab.dashboard_section_4.update(table)
177 |
--------------------------------------------------------------------------------
/dolphie/Panels/ProxySQLHostgroupSummary.py:
--------------------------------------------------------------------------------
1 | from textual.widgets import DataTable
2 |
3 | from dolphie.Modules.Functions import format_bytes, format_number
4 | from dolphie.Modules.TabManager import Tab
5 |
6 |
def create_panel(tab: Tab) -> DataTable:
    """Render the ProxySQL hostgroup summary panel into its DataTable.

    Synchronizes the datatable with ``dolphie.proxysql_hostgroup_summary``:
    adds columns on first render, updates only cells whose values changed,
    appends new rows, removes rows that disappeared from the data, then
    sorts by hostgroup and refreshes the panel title with the row count.
    """
    dolphie = tab.dolphie

    # Column key -> display name, fixed width (None = auto) and value format
    columns = {
        "hostgroup": {"name": "Hostgroup", "width": None, "format": None},
        "srv_host": {"name": "Backend Host", "width": 35, "format": None},
        "srv_port": {"name": "Port", "width": 5, "format": None},
        "status": {"name": "Status", "width": None, "format": None},
        "weight": {"name": "Weight", "width": None, "format": None},
        "use_ssl": {"name": "SSL", "width": 5, "format": None},
        "ConnUsed": {"name": "Conn Used", "width": 11, "format": "number"},
        "ConnFree": {"name": "Conn Free", "width": 10, "format": "number"},
        "ConnOK": {"name": "Conn OK", "width": 10, "format": "number"},
        "ConnERR": {"name": "Conn ERR", "width": 10, "format": "number"},
        "MaxConnUsed": {"name": "Max Conn", "width": 11, "format": "number"},
        "Queries_per_sec": {"name": "Queries/s", "width": 10, "format": "number"},
        "Bytes_data_sent_per_sec": {"name": "Data Sent/s", "width": 11, "format": "bytes"},
        "Bytes_data_recv_per_sec": {"name": "Data Recvd/s", "width": 12, "format": "bytes"},
        "Latency_us": {"name": "Latency (ms)", "width": 12, "format": "time"},
    }

    hostgroup_summary_datatable = tab.proxysql_hostgroup_summary_datatable

    # Add columns to the datatable if it is empty
    if not hostgroup_summary_datatable.columns:
        for column_key, column_data in columns.items():
            hostgroup_summary_datatable.add_column(
                column_data["name"], key=column_key, width=column_data["width"]
            )

    for row in dolphie.proxysql_hostgroup_summary:
        # Composite key: the same backend host/port can appear in several hostgroups
        row_id = f"{row['hostgroup']}_{row['srv_host']}_{row['srv_port']}"
        row_values = []

        for column_id, (column_key, column_data) in enumerate(columns.items()):
            column_format = column_data["format"]
            column_value = row.get(column_key, 0)

            if column_format == "time":
                # Latency_us is in microseconds; display as milliseconds
                column_value = f"{round(int(column_value) / 1000, 2)}"
            elif column_format == "bytes":
                column_value = format_bytes(column_value)
            elif column_format == "number":
                column_value = format_number(column_value)
            elif column_key == "hostgroup":
                column_value = int(column_value)
            elif column_key == "srv_host":
                column_value = dolphie.get_hostname(column_value)
            elif column_key == "status":
                column_value = "[green]ONLINE" if column_value == "ONLINE" else f"[red]{column_value}"
            elif column_key == "use_ssl":
                column_value = "ON" if column_value == "1" else "OFF"

            # Dim zero values, except the hostgroup id itself
            if column_key != "hostgroup" and (column_value == "0" or column_value == 0):
                column_value = "[dark_gray]0"

            if row_id in hostgroup_summary_datatable.rows:
                datatable_value = hostgroup_summary_datatable.get_row(row_id)[column_id]

                # Only touch the cell when the rendered value actually changed
                if column_value != datatable_value:
                    hostgroup_summary_datatable.update_cell(row_id, column_key, column_value)
            else:
                # Collect values so the new row can be added in one call
                row_values.append(column_value)

        # Add a new row to the datatable
        if row_values:
            hostgroup_summary_datatable.add_row(*row_values, key=row_id)

    # Remove rows from the datatable that no longer exist in the data
    if dolphie.proxysql_hostgroup_summary:
        current_rows = {
            f"{row['hostgroup']}_{row['srv_host']}_{row['srv_port']}" for row in dolphie.proxysql_hostgroup_summary
        }
        rows_to_remove = set(hostgroup_summary_datatable.rows.keys()) - current_rows
        for row_id in rows_to_remove:
            hostgroup_summary_datatable.remove_row(row_id)
    elif hostgroup_summary_datatable.row_count:
        hostgroup_summary_datatable.clear()

    hostgroup_summary_datatable.sort("hostgroup")

    tab.proxysql_hostgroup_summary_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.proxysql_hostgroup_summary.name)} "
        f"([$highlight]{hostgroup_summary_datatable.row_count}[/$highlight])"
    )
98 |
--------------------------------------------------------------------------------
/dolphie/Panels/ProxySQLProcesslist.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from rich.syntax import Syntax
4 | from textual.widgets import DataTable
5 |
6 | from dolphie.DataTypes import ProcesslistThread, ProxySQLProcesslistThread
7 | from dolphie.Modules.Functions import format_query
8 | from dolphie.Modules.Queries import ProxySQLQueries
9 | from dolphie.Modules.TabManager import Tab
10 |
11 |
def create_panel(tab: Tab) -> DataTable:
    """Render the ProxySQL processlist panel into the tab's processlist DataTable.

    Synchronizes the datatable with ``dolphie.processlist_threads``: columns are
    rebuilt when the column set changes, only cells whose values changed are
    updated, new threads are appended, and threads that disappeared (or were
    filtered out during a replay) are removed. The table is then sorted by the
    hidden ``time_seconds`` column and the panel title refreshed with the row count.
    """
    dolphie = tab.dolphie

    # Display name -> ProxySQLProcesslistThread attribute; width None lets the
    # Query column auto-size, width 0 hides the time_seconds sort column
    columns = [
        {"name": "Thread ID", "field": "id", "width": 11},
        {"name": "Hostgroup", "field": "hostgroup", "width": 9},
        {"name": "Username", "field": "user", "width": 20},
    ]

    if dolphie.show_additional_query_columns:
        columns.extend(
            [
                {"name": "Frontend Host", "field": "frontend_host", "width": 25},
            ]
        )

    columns.extend(
        [
            {"name": "Backend Host", "field": "host", "width": 25},
            {"name": "Database", "field": "db", "width": 17},
            {"name": "Command", "field": "command", "width": 8},
            {"name": "Age", "field": "formatted_time", "width": 9},
            {"name": "Query", "field": "formatted_query", "width": None},
            {"name": "time_seconds", "field": "time", "width": 0},
        ]
    )

    # Refresh optimization: cap how much query text is rendered per cell
    query_length_max = 300
    processlist_datatable = tab.processlist_datatable

    # Clear table if columns change (e.g. the additional-columns toggle flipped)
    if len(processlist_datatable.columns) != len(columns):
        processlist_datatable.clear(columns=True)

    # Add columns to the datatable if it is empty
    if not processlist_datatable.columns:
        for column_data in columns:
            column_name = column_data["name"]
            column_width = column_data["width"]
            processlist_datatable.add_column(column_name, key=column_name, width=column_width)

    filter_threads = []
    # Iterate through processlist_threads
    for thread_id, thread in dolphie.processlist_threads.items():
        row_values = []

        thread: ProxySQLProcesslistThread
        # We use filter here for replays since the original way requires changing WHERE clause
        if dolphie.replay_file:
            found = False
            if dolphie.user_filter and dolphie.user_filter != thread.user:
                found = True
            elif dolphie.db_filter and dolphie.db_filter != thread.db:
                found = True
            elif dolphie.host_filter and dolphie.host_filter not in thread.host:
                found = True
            elif dolphie.query_time_filter and dolphie.query_time_filter >= thread.time:
                found = True
            elif dolphie.query_filter and dolphie.query_filter not in thread.formatted_query.code:
                found = True
            elif dolphie.hostgroup_filter and dolphie.hostgroup_filter != thread.hostgroup:
                found = True

            if found:
                filter_threads.append(thread_id)
                continue

        for column_id, (column_data) in enumerate(columns):
            column_name = column_data["name"]
            column_field = column_data["field"]
            column_value = getattr(thread, column_field)

            thread_value = column_value

            if thread_id in processlist_datatable.rows:
                datatable_value = processlist_datatable.get_row(thread_id)[column_id]

                # Initialize temp values for possible Syntax object comparison below
                temp_thread_value = thread_value
                temp_datatable_value = datatable_value

                # If the column is the query, we need to compare the code of the Syntax object
                update_width = False
                if column_field == "formatted_query":
                    update_width = True
                    if isinstance(thread_value, Syntax):
                        # Only compare/render the first {query_length_max} characters
                        temp_thread_value = thread_value.code[:query_length_max]
                        thread_value = format_query(temp_thread_value)
                    if isinstance(datatable_value, Syntax):
                        temp_datatable_value = datatable_value.code

                # Update the datatable if values differ; Age/time columns are
                # rewritten unconditionally since they change every poll
                if (
                    temp_thread_value != temp_datatable_value
                    or column_field == "formatted_time"
                    or column_field == "time"
                ):
                    processlist_datatable.update_cell(thread_id, column_name, thread_value, update_width=update_width)
            else:
                # Only show the first {query_length_max} characters of the query
                if column_field == "formatted_query" and isinstance(thread_value, Syntax):
                    thread_value = format_query(thread_value.code[:query_length_max])

                # Create an array of values to append to the datatable
                row_values.append(thread_value)

        # Add a new row to the datatable
        if row_values:
            processlist_datatable.add_row(*row_values, key=thread_id)

    # Remove threads that were filtered out
    for thread_id in filter_threads:
        dolphie.processlist_threads.pop(thread_id)

    # Remove rows from processlist_datatable that no longer exist in processlist_threads
    if dolphie.processlist_threads:
        rows_to_remove = set(processlist_datatable.rows.keys()) - set(dolphie.processlist_threads.keys())
        for id in rows_to_remove:
            processlist_datatable.remove_row(id)
    else:
        if processlist_datatable.row_count:
            processlist_datatable.clear()

    processlist_datatable.sort("time_seconds", reverse=dolphie.sort_by_time_descending)

    tab.processlist_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.processlist.name)} "
        f"([$highlight]{processlist_datatable.row_count}[/$highlight])"
    )
142 |
143 |
def fetch_data(tab: Tab) -> Dict[str, ProcesslistThread]:
    """Query ProxySQL's processlist and return threads keyed by thread id (as str).

    Builds a WHERE clause from the tab's active filters, substitutes it into the
    ``$1`` placeholder of the processlist query, runs it on the main connection,
    and skips Dolphie's own connections.

    NOTE(review): filter values are interpolated into the SQL with %-formatting
    rather than bound parameters, so a filter containing a quote can break (or
    inject into) the query -- confirm these filters are trusted input.
    """
    dolphie = tab.dolphie

    ########################
    # WHERE clause filters #
    ########################
    where_clause = []

    # Filter out idle threads if specified
    if not dolphie.show_idle_threads:
        where_clause.append("command NOT IN ('Sleep', 'Connecting client')")

    # Filter user
    if dolphie.user_filter:
        where_clause.append("user = '%s'" % dolphie.user_filter)

    # Filter database
    if dolphie.db_filter:
        where_clause.append("db = '%s'" % dolphie.db_filter)

    # Filter hostname/IP
    if dolphie.host_filter:
        where_clause.append("srv_host = '%s'" % dolphie.host_filter)

    # Filter time
    if dolphie.query_time_filter:
        # Convert seconds to milliseconds since the time_ms column is in milliseconds
        time = dolphie.query_time_filter * 1000
        where_clause.append("time_ms >= '%s'" % time)

    # Filter query
    if dolphie.query_filter:
        where_clause.append("info LIKE '%%%s%%'" % dolphie.query_filter)

    # Filter hostgroup
    if dolphie.hostgroup_filter:
        where_clause.append("hostgroup = '%s'" % dolphie.hostgroup_filter)

    # Add in our dynamic WHERE clause for filtering; "1=1" is a no-op predicate
    # used when no filters are active
    if where_clause:
        processlist_query = ProxySQLQueries.processlist.replace("$1", " AND ".join(where_clause))
    else:
        processlist_query = ProxySQLQueries.processlist.replace("$1", "1=1")

    processlist_threads = {}
    # Run the processlist query
    dolphie.main_db_connection.execute(processlist_query)
    threads = dolphie.main_db_connection.fetchall()

    for thread in threads:
        # Don't include Dolphie's threads
        if (
            dolphie.main_db_connection.connection_id == thread["id"]
            or dolphie.secondary_db_connection.connection_id == thread["id"]
        ):
            continue

        # Normalize host fields via dolphie.get_hostname and coerce NULL query to ""
        thread["frontend_host"] = dolphie.get_hostname(thread["frontend_host"])
        thread["backend_host"] = dolphie.get_hostname(thread["backend_host"])
        thread["query"] = "" if thread["query"] is None else thread["query"]

        processlist_threads[str(thread["id"])] = ProxySQLProcesslistThread(thread)

    return processlist_threads
208 |
--------------------------------------------------------------------------------
/dolphie/Panels/ProxySQLQueryRules.py:
--------------------------------------------------------------------------------
1 | from textual.widgets import DataTable
2 |
3 | from dolphie.Modules.Functions import format_number
4 | from dolphie.Modules.TabManager import Tab
5 |
6 |
def create_panel(tab: Tab) -> DataTable:
    """Render the ProxySQL query rules panel into its DataTable.

    Shows only columns that are flagged ``always_show`` or have a non-empty
    value in at least one rule, derives hits/s from the previous poll's
    counters stored in ``dolphie.proxysql_per_second_data``, and synchronizes
    the datatable with the current rules (cell updates, row additions,
    row removals).
    """
    dolphie = tab.dolphie

    # Master column catalog: display name, value format, and whether the
    # column is shown even when every rule leaves it empty
    all_columns = {
        "rule_id": {"name": "Rule ID", "format": None, "always_show": True},
        "hits": {"name": "Total Hits", "format": "number", "always_show": True},
        "hits_s": {"name": "Hits/s", "format": "number", "always_show": True},
        "apply": {"name": "Apply", "format": None, "always_show": True},
        "log": {"name": "Log", "format": None, "always_show": True},
        "flagIN": {"name": "flagIN", "format": None, "always_show": False},
        "flagOUT": {"name": "flagOUT", "format": None, "always_show": False},
        "destination_hostgroup": {"name": "Dest HG", "format": None, "always_show": False},
        "username": {"name": "Username", "format": None, "always_show": False},
        "match_pattern": {"name": "Match Pattern", "format": None, "always_show": False},
        "match_digest": {"name": "Match Digest", "format": None, "always_show": False},
        "schemaname": {"name": "Schema", "format": None, "always_show": False},
        "client_addr": {"name": "Client Addr", "format": None, "always_show": False},
        "proxy_addr": {"name": "Proxy Addr", "format": None, "always_show": False},
        "proxy_port": {"name": "Proxy Port", "format": None, "always_show": False},
        "digest": {"name": "Digest", "format": None, "always_show": False},
        "negate_match_pattern": {"name": "Negate Match", "format": None, "always_show": False},
        "re_modifiers": {"name": "RE Modifiers", "format": None, "always_show": False},
        "replace_pattern": {"name": "Replace Pattern", "format": None, "always_show": False},
        "cache_ttl": {"name": "Cache TTL", "format": None, "always_show": False},
        "cache_empty_result": {"name": "Cache Empty", "format": None, "always_show": False},
        "cache_timeout": {"name": "Cache Timeout", "format": None, "always_show": False},
        "reconnect": {"name": "Reconnect", "format": None, "always_show": False},
        "timeout": {"name": "Timeout", "format": None, "always_show": False},
        "retries": {"name": "Retries", "format": None, "always_show": False},
        "delay": {"name": "Delay", "format": None, "always_show": False},
        "next_query_flagIN": {"name": "Next flagIN", "format": None, "always_show": False},
        "mirror_flagOUT": {"name": "Mirror flagOUT", "format": None, "always_show": False},
        "mirror_hostgroup": {"name": "Mirror HG", "format": None, "always_show": False},
        "error_msg": {"name": "Error Msg", "format": None, "always_show": False},
        "OK_msg": {"name": "OK Msg", "format": None, "always_show": False},
        "sticky_conn": {"name": "Sticky Conn", "format": None, "always_show": False},
        "multiplex": {"name": "Multiplex", "format": None, "always_show": False},
        "gtid_from_hostgroup": {"name": "GTID from HG", "format": None, "always_show": False},
        "attributes": {"name": "Attributes", "format": None, "always_show": False},
        "comment": {"name": "Comment", "format": None, "always_show": False},
    }

    # Filter only relevant columns from all_columns based on data presence or always_show flag
    columns_with_data = {
        column
        for row in dolphie.proxysql_mysql_query_rules
        for column, value in row.items()
        if value not in (None, "", "NULL", 0, "0") or all_columns[column]["always_show"]
    }

    # Build the filtered columns dictionary using only columns_with_data
    columns_filtered = {
        column: {
            "name": props["name"],
            "width": None,
            "format": props["format"],
        }
        for column, props in all_columns.items()
        if column in columns_with_data
    }

    mysql_query_rules = tab.proxysql_mysql_query_rules_datatable

    # Clear table if columns change (the visible set depends on the data)
    if len(mysql_query_rules.columns) != len(columns_filtered):
        mysql_query_rules.clear(columns=True)

    # Add columns to the datatable if it is empty
    if not mysql_query_rules.columns:
        for column_key, column_data in columns_filtered.items():
            column_name = column_data["name"]
            column_width = column_data["width"]
            mysql_query_rules.add_column(column_name, key=column_key, width=column_width)

    for row in dolphie.proxysql_mysql_query_rules:
        row_id = row["rule_id"]
        row_values = []

        for column_id, (column_key, column_data) in enumerate(columns_filtered.items()):
            column_name = column_data["name"]
            column_format = column_data["format"]
            column_value = row.get(column_key)

            # Calculate the values per second for the following columns
            if column_key in ["hits_s"]:
                if not dolphie.proxysql_per_second_data.get(row_id, {}).get(column_key, 0):
                    # First poll for this rule: no baseline counter to diff against.
                    # NOTE(review): this markup string is then fed through
                    # format_number below -- confirm format_number passes
                    # non-numeric strings through unchanged (the sibling
                    # CommandStats panel uses integer 0 here instead)
                    column_value = "[dark_gray]0"
                else:
                    value_diff = int(column_value) - dolphie.proxysql_per_second_data.get(row_id, {}).get(column_key, 0)
                    column_value = round(value_diff / dolphie.polling_latency)

                # Store the raw counter as the baseline for the next poll
                dolphie.proxysql_per_second_data.setdefault(row_id, {})[column_key] = int(row.get(column_key, 0))

            if column_key in ["apply", "log"]:
                column_value = "Yes" if column_value == "1" else "No"

            if column_format == "number":
                column_value = format_number(column_value)

            # Dim missing and zero values
            if column_value is None:
                column_value = "[dark_gray]N/A"
            elif column_value == "0":
                column_value = "[dark_gray]0"

            if row_id in mysql_query_rules.rows:
                datatable_value = mysql_query_rules.get_row(row_id)[column_id]

                # Update the datatable if values differ
                if column_value != datatable_value:
                    mysql_query_rules.update_cell(row_id, column_key, column_value)
            else:
                # Create an array of values to append to the datatable
                row_values.append(column_value)

        # Add a new row to the datatable
        if row_values:
            mysql_query_rules.add_row(*row_values, key=row_id)

    # Remove rows from datatable that no longer exist in the data
    if dolphie.proxysql_mysql_query_rules:
        current_rows = {row["rule_id"] for row in dolphie.proxysql_mysql_query_rules}
        existing_rows = set(mysql_query_rules.rows.keys())

        rows_to_remove = existing_rows - current_rows
        for row_id in rows_to_remove:
            mysql_query_rules.remove_row(row_id)
    else:
        if mysql_query_rules.row_count:
            mysql_query_rules.clear()

    tab.proxysql_mysql_query_rules_title.update(
        f"{dolphie.panels.get_panel_title(dolphie.panels.proxysql_mysql_query_rules.name)} "
        f"([$highlight]{mysql_query_rules.row_count}[/$highlight])"
    )
141 |
--------------------------------------------------------------------------------
/dolphie/Panels/StatementsSummaryMetrics.py:
--------------------------------------------------------------------------------
1 | from rich.syntax import Syntax
2 |
3 | from dolphie.DataTypes import ConnectionSource
4 | from dolphie.Modules.Functions import format_number, format_picoseconds, format_query
5 | from dolphie.Modules.TabManager import Tab
6 |
7 |
def create_panel(tab: Tab):
    """Render the statements summary panel into its DataTable.

    Synchronizes the datatable with the filtered statements summary data:
    rebuilds columns when the column set changes (version-dependent), updates
    only cells whose values changed, appends new digests, removes stale ones,
    then sorts by total latency and refreshes the panel title with the row
    count. Hides the table entirely when there is no data.
    """
    dolphie = tab.dolphie
    datatable = tab.statements_summary_datatable
    # Only the first characters of a query are rendered to keep refreshes cheap
    query_length_max = 300

    # Display name -> statement metric field; width None lets the Query column
    # auto-size, width 0 hides the latency_total sort column
    columns = [
        {"name": "Schema", "field": "schema_name", "width": 14, "format_number": False},
        {"name": "Count", "field": "count_star", "width": 8, "format_number": True},
        {"name": "Latency", "field": "sum_timer_wait", "width": 10, "format_number": False},
    ]

    # Percentile columns are only available on MySQL 8.0+ (and not on MariaDB)
    if dolphie.is_mysql_version_at_least("8.0") and dolphie.connection_source_alt != ConnectionSource.mariadb:
        columns.extend(
            [
                {"name": "95th %", "field": "quantile_95", "width": 10, "format_number": False},
                {"name": "99th %", "field": "quantile_99", "width": 10, "format_number": False},
            ]
        )

    columns.extend(
        [
            {"name": "Lock time", "field": "sum_lock_time", "width": 9, "format_number": False},
            {"name": "Rows examined", "field": "sum_rows_examined", "width": 13, "format_number": True},
            {"name": "Rows affected", "field": "sum_rows_affected", "width": 13, "format_number": True},
            {"name": "Rows sent", "field": "sum_rows_sent", "width": 9, "format_number": True},
            {"name": "Errors", "field": "sum_errors", "width": 6, "format_number": True},
            {"name": "Warnings", "field": "sum_warnings", "width": 8, "format_number": True},
            {"name": "Bad idx", "field": "sum_no_good_index_used", "width": 7, "format_number": True},
            {"name": "No idx", "field": "sum_no_index_used", "width": 6, "format_number": True},
            {"name": "Query", "field": "query", "width": None, "format_number": False},
            {"name": "latency_total", "field": "sum_timer_wait", "width": 0, "format_number": False},
        ]
    )

    # Nothing to show: hide the table and reset the title's row count
    if not dolphie.statements_summary_data or not dolphie.statements_summary_data.filtered_data:
        datatable.display = False
        tab.statements_summary_title.update(
            (
                f"{tab.dolphie.panels.get_panel_title(tab.dolphie.panels.statements_summary.name)} "
                f"([$highlight]0[/$highlight])"
            )
        )

        return

    datatable.display = True

    # Clear table if the column set changed (e.g. version-dependent columns)
    if len(datatable.columns) != len(columns):
        datatable.clear(columns=True)

    # Add columns to the datatable if it is empty
    if not tab.statements_summary_datatable.columns:
        for column_data in columns:
            datatable.add_column(column_data["name"], key=column_data["name"], width=column_data["width"])

    data = tab.dolphie.statements_summary_data.filtered_data
    if data:
        for digest, metrics in data.items():
            row_values = []

            for column_id, column_data in enumerate(columns):
                column_name = column_data["name"]
                column_value = metrics.get(column_data["field"], {})

                # Metric fields are dicts holding total ("t"), delta ("d") and
                # last-sample values; pick the one matching the radio selection
                if isinstance(column_value, dict):
                    if tab.statements_summary_radio_set.pressed_button.id == "statements_summary_total":
                        column_value = column_value.get("t", 0)
                    # NOTE(review): the id "statements_summarys_delta" looks
                    # misspelled -- confirm it matches the actual RadioButton id
                    elif tab.statements_summary_radio_set.pressed_button.id == "statements_summarys_delta":
                        column_value = column_value.get("d", 0)
                    else:
                        column_value = column_value.get("d_last_sample", 0)

                if column_name == "Query":
                    if tab.dolphie.show_statements_summary_query_digest_text_sample:
                        column_value = metrics.get("query_sample_text")
                    else:
                        column_value = metrics.get("digest_text")
                    column_value = format_query(column_value)
                elif column_name == "Schema":
                    column_value = column_value or "[dark_gray]N/A"
                elif column_data["format_number"]:
                    column_value = format_number(column_value)
                elif column_name in ("Latency", "Lock time", "CPU time", "95th %", "99th %"):
                    # Timer columns are in picoseconds; previously two identical
                    # elif branches handled these names -- merged into one
                    column_value = format_picoseconds(column_value)

                # Dim zero values (but keep the hidden sort column raw)
                if column_name != "latency_total" and (column_value == "0" or column_value == 0):
                    column_value = "[dark_gray]0"

                if digest in tab.statements_summary_datatable.rows:
                    datatable_value = tab.statements_summary_datatable.get_row(digest)[column_id]

                    temp_column_value = column_value
                    temp_datatable_value = datatable_value

                    # Queries are Syntax objects; compare their underlying code
                    update_width = False
                    if column_name == "Query":
                        update_width = True
                        if isinstance(column_value, Syntax):
                            temp_column_value = column_value.code[:query_length_max]
                            column_value = format_query(temp_column_value)
                        if isinstance(datatable_value, Syntax):
                            temp_datatable_value = datatable_value.code

                    if temp_column_value != temp_datatable_value:
                        tab.statements_summary_datatable.update_cell(
                            digest, column_name, column_value, update_width=update_width
                        )
                else:
                    # Only show the first {query_length_max} characters of the query
                    if column_name == "Query" and isinstance(column_value, Syntax):
                        column_value = format_query(column_value.code[:query_length_max])

                    row_values.append(column_value)

            if row_values:
                tab.statements_summary_datatable.add_row(*row_values, key=digest)

    # Remove rows whose digests no longer exist in the data
    if data:
        current_rows = set(data.keys())
        existing_rows = set(datatable.rows.keys())

        rows_to_remove = existing_rows - current_rows
        for row_id in rows_to_remove:
            datatable.remove_row(row_id)
    else:
        if datatable.row_count:
            datatable.clear()

    title = (
        f"{tab.dolphie.panels.get_panel_title(tab.dolphie.panels.statements_summary.name)} "
        f"([$highlight]{tab.statements_summary_datatable.row_count}[/$highlight])"
    )
    tab.statements_summary_title.update(title)

    tab.statements_summary_datatable.sort("latency_total", reverse=tab.dolphie.sort_by_time_descending)
146 |
--------------------------------------------------------------------------------
/dolphie/Widgets/CommandModal.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | from textual import on
4 | from textual.app import ComposeResult
5 | from textual.binding import Binding
6 | from textual.containers import Horizontal, Vertical
7 | from textual.screen import ModalScreen
8 | from textual.widgets import Button, Checkbox, Input, Label, Rule, Select, Static
9 |
10 | from dolphie.DataTypes import ConnectionSource, HotkeyCommands
11 | from dolphie.Widgets.AutoComplete import AutoComplete, DropdownItem
12 |
13 |
class CommandModal(ModalScreen):
    """Modal screen that collects and validates input for a hotkey command.

    Depending on the ``HotkeyCommands`` value it is opened with, a different
    set of widgets is displayed (thread filter, kill by parameter, maximize
    panel, rename tab, variable search, etc.). On submit, the input is
    validated and the modal dismisses with the collected value(s) for the
    caller to act on.
    """

    CSS = """
    CommandModal {
        & > Vertical {
            background: #131626;
            border: tall #384673;
            height: auto;
            width: auto;

            & > * {
                width: auto;
                height: auto;
                content-align: center middle;
            }
        }

        & .command_container {
            width: auto;
            height: auto;

            & Input, Select {
                width: 60;
                border-title-color: #d2d2d2;
            }
        }

        & Label {
            width: 100%;
            content-align: center middle;
            padding-bottom: 1;
        }

        & Rule {
            width: 100%;
            margin-bottom: 1;
        }

        & #error_response {
            color: #fe5c5c;
            width: 100%;
            height: auto;
            content-align: center middle;
            padding-bottom: 1;
        }

        & Checkbox {
            background: #131626;
            border: none;
            content-align: center middle;
            padding-top: 1;
            width: 100%;
        }

        & #sleeping_queries {
            padding-bottom: 1;
        }
    }
    """
    BINDINGS = [
        Binding("escape", "app.pop_screen", "", show=False),
    ]

    def __init__(
        self,
        command,
        message,
        connection_source: ConnectionSource = None,
        processlist_data=None,
        maximize_panel_options=None,
        host_cache_data=None,
        max_replay_timestamp=None,
    ):
        """Store the command context and pre-build dropdown items.

        Args:
            command: The ``HotkeyCommands`` value that determines which
                widgets are shown and how submit is validated.
            message: Title text displayed at the top of the modal.
            connection_source: Connection source (MySQL/ProxySQL); controls
                ProxySQL-only fields such as the hostgroup filter.
            processlist_data: Mapping of thread id -> thread object used to
                build autocomplete candidates and validate thread ids.
            maximize_panel_options: Options for the maximize-panel Select.
            host_cache_data: Mapping of IP -> resolved hostname used to
                translate a hostname filter back to its IP.
            max_replay_timestamp: Pre-filled timestamp for replay seeking.
        """
        super().__init__()
        self.command = command
        self.message = message
        self.connection_source = connection_source
        self.processlist_data = processlist_data
        self.host_cache_data = host_cache_data
        self.max_replay_timestamp = max_replay_timestamp

        self.dropdown_items = []
        if processlist_data:
            # Keys are numeric strings; sort by numeric value so thread ids
            # appear in ascending order in the autocomplete dropdown
            sorted_keys = sorted(processlist_data.keys(), key=lambda x: int(x))
            self.dropdown_items = [DropdownItem(thread_id) for thread_id in sorted_keys]

        self.maximize_panel_select_options = maximize_panel_options or []

    def compose(self) -> ComposeResult:
        with Vertical():
            with Vertical():
                yield Label(f"[b]{self.message}[/b]")

                modal_input = Input(id="modal_input")
                filter_by_username_input = Input(id="filter_by_username_input")
                filter_by_host_input = Input(id="filter_by_host_input")
                filter_by_db_input = Input(id="filter_by_db_input")
                filter_by_hostgroup_input = Input(id="filter_by_hostgroup_input")
                kill_by_id_input = Input(id="kill_by_id_input")
                kill_by_username_input = Input(id="kill_by_username_input")
                kill_by_host_input = Input(id="kill_by_host_input")

                with Vertical(id="maximize_panel_container", classes="command_container"):
                    yield Select(
                        options=self.maximize_panel_select_options, id="maximize_panel_select", prompt="Select a Panel"
                    )
                    yield Label("[b]Note[/b]: Press [b][$yellow]ESC[/b][/$yellow] to exit maximized panel")
                with Vertical(id="filter_container", classes="command_container"):
                    yield filter_by_username_input
                    yield filter_by_host_input
                    yield filter_by_db_input
                    yield filter_by_hostgroup_input
                    yield AutoComplete(filter_by_username_input, id="filter_by_username_dropdown_items", candidates=[])
                    yield AutoComplete(filter_by_host_input, id="filter_by_host_dropdown_items", candidates=[])
                    yield AutoComplete(filter_by_db_input, id="filter_by_db_dropdown_items", candidates=[])
                    yield AutoComplete(
                        filter_by_hostgroup_input,
                        id="filter_by_hostgroup_dropdown_items",
                        candidates=[],
                    )

                    yield Input(id="filter_by_query_time_input")
                    yield Input(id="filter_by_query_text_input")
                with Vertical(id="kill_container", classes="command_container"):
                    yield kill_by_id_input
                    yield AutoComplete(kill_by_id_input, id="kill_by_id_dropdown_items", candidates=[])

                    yield Rule(line_style="heavy")

                    yield kill_by_username_input
                    yield kill_by_host_input
                    yield AutoComplete(kill_by_username_input, id="kill_by_username_dropdown_items", candidates=[])
                    yield AutoComplete(kill_by_host_input, id="kill_by_host_dropdown_items", candidates=[])

                    yield Input(id="kill_by_age_range_input", placeholder="Example: 5-8")
                    yield Input(id="kill_by_query_text_input")
                    yield Checkbox("Include sleeping queries", id="sleeping_queries")
                    yield Label(
                        "[$dark_gray][b]Note:[/b] Only threads visible and executing (or sleeping)\n"
                        "in the Processlist panel can be killed in this section"
                    )

                yield modal_input
                yield AutoComplete(
                    modal_input, id="dropdown_items", candidates=self.dropdown_items, prevent_default_enter=False
                )

                yield Static(id="error_response")
            with Horizontal(classes="button_container"):
                yield Button("Submit", id="submit", variant="primary")
                yield Button("Cancel", id="cancel")

    def on_mount(self):
        """Show/hide the widget groups appropriate for the active command."""
        input = self.query_one("#modal_input", Input)
        maximize_panel_container = self.query_one("#maximize_panel_container", Vertical)
        filter_container = self.query_one("#filter_container", Vertical)
        kill_container = self.query_one("#kill_container", Vertical)
        self.query_one("#error_response", Static).display = False

        # Hide everything by default; each command re-enables what it needs
        maximize_panel_container.display = False
        filter_container.display = False
        kill_container.display = False

        if self.command == HotkeyCommands.thread_filter:
            input.display = False
            filter_container.display = True

            self.query_one("#filter_by_username_input", Input).focus()
            self.query_one("#filter_by_username_input", Input).border_title = "Username"
            self.query_one("#filter_by_username_dropdown_items", AutoComplete).candidates = self.create_dropdown_items(
                "user"
            )
            self.query_one("#filter_by_host_input", Input).border_title = "Host/IP"
            self.query_one("#filter_by_host_dropdown_items", AutoComplete).candidates = self.create_dropdown_items(
                "host"
            )
            self.query_one("#filter_by_db_input", Input).border_title = "Database"
            self.query_one("#filter_by_db_dropdown_items", AutoComplete).candidates = self.create_dropdown_items("db")
            self.query_one("#filter_by_query_time_input", Input).border_title = (
                "Minimum Query Time [$dark_gray](seconds)"
            )
            self.query_one("#filter_by_query_text_input", Input).border_title = (
                "Partial Query Text [$dark_gray](case-sensitive)"
            )

            # Hostgroup filtering only applies to ProxySQL connections
            if self.connection_source != ConnectionSource.proxysql:
                self.query_one("#filter_by_hostgroup_input", Input).display = False
            else:
                self.query_one("#filter_by_host_input", Input).border_title = "Backend Host/IP"
                self.query_one("#filter_by_hostgroup_input", Input).border_title = "Hostgroup"
                self.query_one("#filter_by_hostgroup_dropdown_items", AutoComplete).candidates = (
                    self.create_dropdown_items("hostgroup")
                )
        elif self.command == HotkeyCommands.thread_kill_by_parameter:
            input.display = False
            kill_container.display = True

            self.query_one("#kill_by_id_input", Input).focus()
            self.query_one("#kill_by_id_dropdown_items", AutoComplete).candidates = self.dropdown_items
            self.query_one("#kill_by_id_input", Input).border_title = "Thread ID [$dark_gray](enter submits)"
            self.query_one("#kill_by_username_input", Input).border_title = "Username"
            self.query_one("#kill_by_username_dropdown_items", AutoComplete).candidates = self.create_dropdown_items(
                "user"
            )
            self.query_one("#kill_by_host_input", Input).border_title = "Host/IP"
            self.query_one("#kill_by_host_dropdown_items", AutoComplete).candidates = self.create_dropdown_items("host")
            self.query_one("#kill_by_age_range_input", Input).border_title = "Age Range [$dark_gray](seconds)"
            self.query_one("#kill_by_query_text_input", Input).border_title = (
                "Partial Query Text [$dark_gray](case-sensitive)"
            )

            # Default the checkbox to checked (it is created unchecked)
            sleeping_queries_checkbox = self.query_one("#sleeping_queries", Checkbox)
            sleeping_queries_checkbox.toggle()

            input.placeholder = "Select an option from above"
        elif self.command == HotkeyCommands.maximize_panel:
            input.display = False
            maximize_panel_container.display = True
        elif self.command == HotkeyCommands.rename_tab:
            input.placeholder = "Colors can be added by wrapping them in []"
            input.styles.width = 50
            input.focus()
        elif self.command == HotkeyCommands.variable_search:
            input.placeholder = "Input 'all' to show everything"
            input.focus()
        elif self.command in [HotkeyCommands.show_thread]:
            input.placeholder = "Input a Thread ID"
            input.focus()
        elif self.command == HotkeyCommands.refresh_interval:
            input.placeholder = "Input a refresh interval"
            input.focus()
        elif self.command == HotkeyCommands.replay_seek:
            if self.max_replay_timestamp:
                input.value = self.max_replay_timestamp
            input.placeholder = "Format: 2024-07-25 13:00:00"
            input.focus()
        else:
            input.focus()

    def create_dropdown_items(self, field):
        """Build sorted, de-duplicated autocomplete items for a thread field.

        Args:
            field: Attribute name on each processlist thread object
                (e.g. ``user``, ``host``, ``db``, ``hostgroup``).

        Returns:
            A list of ``DropdownItem`` for every distinct non-None value.
        """
        dropdown_items = []

        # Guard against a missing processlist so we never iterate over None
        if field and self.processlist_data:
            # Filter out None values before sorting
            sorted_array = sorted(
                set(
                    getattr(thread, field)
                    for thread in self.processlist_data.values()
                    if getattr(thread, field) is not None
                )
            )
            dropdown_items = [DropdownItem(str(value)) for value in sorted_array]

        return dropdown_items

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Validate the active command's input and dismiss with its value(s)."""
        if event.button.id != "submit":
            self.app.pop_screen()
            return

        modal_input = self.query_one("#modal_input", Input).value
        if not modal_input and self.command not in [
            HotkeyCommands.rename_tab,
            HotkeyCommands.thread_kill_by_parameter,
            HotkeyCommands.thread_filter,
            HotkeyCommands.maximize_panel,
        ]:
            self.update_error_response("Input cannot be empty")
            return

        if self.command == HotkeyCommands.thread_filter:
            # Fetch all filter values
            filters = {
                "username": self.query_one("#filter_by_username_input", Input).value,
                "host": self.query_one("#filter_by_host_input", Input).value,
                "db": self.query_one("#filter_by_db_input", Input).value,
                "hostgroup": self.query_one("#filter_by_hostgroup_input", Input).value,
                "query_time": self.query_one("#filter_by_query_time_input", Input).value,
                "query_text": self.query_one("#filter_by_query_text_input", Input).value,
            }

            # Use IP address instead of hostname since that's what is used in
            # the processlist. BUGFIX: dicts have no .candidates() method --
            # iterate the (ip, hostname) pairs with .items()
            if filters["host"]:
                filters["host"] = next(
                    (ip for ip, addr in self.host_cache_data.items() if filters["host"] == addr), filters["host"]
                )

            # Validate numeric fields
            for value, field_name in [(filters["query_time"], "Query time"), (filters["hostgroup"], "Hostgroup")]:
                if value and not re.search(r"^\d+$", value):
                    self.update_error_response(f"{field_name} must be an integer")
                    return

            # Ensure at least one filter is provided
            if not any(filters.values()):
                self.update_error_response("At least one field must be provided")
                return

            # Dismiss with the filter values
            self.dismiss(list(filters.values()))
        elif self.command == HotkeyCommands.thread_kill_by_parameter:
            # Get input values
            kill_by_id = self.query_one("#kill_by_id_input", Input).value
            kill_by_username = self.query_one("#kill_by_username_input", Input).value
            kill_by_host = self.query_one("#kill_by_host_input", Input).value
            kill_by_age_range = self.query_one("#kill_by_age_range_input", Input).value
            kill_by_query_text = self.query_one("#kill_by_query_text_input", Input).value
            checkbox_sleeping_queries = self.query_one("#sleeping_queries", Checkbox).value

            age_range_lower_limit, age_range_upper_limit = None, None

            if kill_by_id and not kill_by_id.isdigit():
                self.update_error_response("Thread ID must be a number")
                return

            # Process and validate age range input (inclusive "low-high" form)
            if kill_by_age_range:
                match = re.match(r"(\d+)-(\d+)", kill_by_age_range)
                if match:
                    age_range_lower_limit, age_range_upper_limit = map(int, match.groups())
                    if age_range_lower_limit > age_range_upper_limit:
                        self.update_error_response("Invalid age range! Lower limit can't be higher than upper")
                        return
                else:
                    self.update_error_response("Invalid age range")
                    return

            # Ensure thread ID or at least one parameter is provided
            if not any([kill_by_id, kill_by_username, kill_by_host, kill_by_age_range, kill_by_query_text]):
                self.update_error_response("Thread ID or at least one parameter must be provided")
                return

            # Dismiss with the filter values
            self.dismiss(
                [
                    kill_by_id,
                    kill_by_username,
                    kill_by_host,
                    kill_by_age_range,
                    age_range_lower_limit,
                    age_range_upper_limit,
                    kill_by_query_text,
                    checkbox_sleeping_queries,
                ]
            )

        elif self.command in {HotkeyCommands.show_thread}:
            # Check the format first so a non-numeric input gets the clearer
            # "must be a number" message instead of "does not exist"
            if not modal_input.isdigit():
                self.update_error_response("Thread ID must be a number")
                return

            if modal_input not in self.processlist_data:
                self.update_error_response(f"Thread ID [b]{modal_input}[/b] does not exist")
                return

            self.dismiss(modal_input)

        elif self.command == HotkeyCommands.refresh_interval:
            try:
                # Convert input to float and check if it's a number at same time
                modal_input = float(modal_input)
            except ValueError:
                self.update_error_response("Input must be a number")
                return

            if modal_input <= 0:
                self.update_error_response("Input must be greater than 0")
                return

            self.dismiss(modal_input)
        elif self.command == HotkeyCommands.replay_seek:
            if not re.search(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}", modal_input):
                self.update_error_response("Invalid timestamp format")
                return

            self.dismiss(modal_input)
        elif self.command == HotkeyCommands.maximize_panel:
            maximize_panel = self.query_one("#maximize_panel_select", Select).value
            if maximize_panel == Select.BLANK:
                self.update_error_response("Please select a panel to maximize")
                return

            self.dismiss(maximize_panel)
        else:
            self.dismiss(modal_input)

    def update_error_response(self, message):
        """Show the validation error area with the given markup message."""
        error_response = self.query_one("#error_response", Static)
        error_response.display = True
        error_response.update(message)

    @on(Input.Submitted, "Input")
    def on_input_submitted(self, event: Input.Submitted):
        # Enter submits directly for single-input commands; multi-field
        # commands require the Submit button (except kill-by-id below)
        if self.command not in [HotkeyCommands.thread_filter, HotkeyCommands.thread_kill_by_parameter]:
            self.query_one("#submit", Button).press()

    @on(Input.Submitted, "#kill_by_id_input")
    def on_kill_by_id_input_submitted(self, event: Input.Submitted):
        self.query_one("#submit", Button).press()
412 |
--------------------------------------------------------------------------------
/dolphie/Widgets/CommandScreen.py:
--------------------------------------------------------------------------------
1 | from textual.binding import Binding
2 | from textual.containers import Center
3 | from textual.screen import Screen
4 | from textual.widgets import Static
5 |
6 | from dolphie.Widgets.TopBar import TopBar
7 |
8 |
class CommandScreen(Screen):
    """Full screen that shows pre-rendered command output beneath the top bar."""

    CSS = """
    CommandScreen {
        & Center {
            padding: 1;

            & > Static {
                padding-left: 1;
                padding-right: 1;
                background: #101626;
                border: tall #1d253e;
                width: auto;
            }
        }
    }
    """

    BINDINGS = [
        Binding("q", "app.pop_screen", "", show=False),
    ]

    def __init__(self, connection_status, app_version, host, data):
        """Keep the connection metadata for the top bar and the content to render."""
        super().__init__()

        self.connection_status = connection_status
        self.app_version = app_version
        self.host = host
        self.data = data

    def compose(self):
        # Header mirrors the main app's top bar; the payload is centered below it
        top_bar = TopBar(connection_status=self.connection_status, app_version=self.app_version, host=self.host)
        yield top_bar
        yield Center(Static(self.data, shrink=True))
40 |
--------------------------------------------------------------------------------
/dolphie/Widgets/EventLogScreen.py:
--------------------------------------------------------------------------------
1 | import textwrap
2 |
3 | from textual import events, on, work
4 | from textual.app import ComposeResult
5 | from textual.binding import Binding
6 | from textual.containers import Container, Horizontal
7 | from textual.screen import Screen
8 | from textual.widgets import DataTable, Input, Label, Switch
9 |
10 | from dolphie.Modules.MySQL import Database
11 | from dolphie.Modules.Queries import MySQLQueries
12 | from dolphie.Widgets.SpinnerWidget import SpinnerWidget
13 | from dolphie.Widgets.TopBar import TopBar
14 |
15 |
class EventLog(Screen):
    """Screen that displays the MySQL error log with filtering.

    Events can be filtered by level (System/Warning/Error/Note switches),
    a number of days to look back, and a text search. Data is fetched in a
    worker thread so the UI stays responsive.
    """

    CSS = """
    EventLog {
        & Horizontal {
            height: auto;
            align: center top;
            background: #0a0e1b;
            width: 100%;

            & > Label {
                color: #bbc8e8;
                text-style: bold;
                margin-right: -1;
            }
        }
        & DataTable {
            background: #0a0e1b;
            border: none;
            overflow-x: auto;
            max-height: 100%;

            &:focus {
                background-tint: #0a0e1b;
            }
        }
        & SpinnerWidget {
            margin-top: 1;
        }
        & .input_container {
            align: left top;
            padding-left: 1;

            & > Input {
                border: none;
                background: #0a0e1b;
                margin: 0;
                height: 1;
            }
        }
        & #days_container {
            & > Input {
                width: 15;
            }
            & > Label {
                margin-right: 2;
            }
        }
        & #info {
            padding-top: 1;
            width: 100%;
            text-align: center;
            text-style: bold;
        }
        & #search {
            width: 90%;
            margin-bottom: 1;
        }
        & #help {
            color: #8f9fc1;
            width: 100%;
            content-align: right middle;
        }
    }
    """

    BINDINGS = [
        Binding("q", "app.pop_screen", "", show=False),
    ]

    def __init__(self, connection_status, app_version, host, db_connection: Database):
        """Store the top-bar metadata and the database connection to query.

        Args:
            connection_status: Connection status shown in the top bar.
            app_version: App version shown in the top bar.
            host: Host label shown in the top bar.
            db_connection: Database connection used to fetch error-log rows.
        """
        super().__init__()

        self.connection_status = connection_status
        self.app_version = app_version
        self.host = host
        self.db_connection = db_connection

        # Per-level toggle state plus the SQL predicate that selects that level
        self.levels = {
            "system": {"active": True, "sql": "prio = 'System'"},
            "warning": {"active": True, "sql": "prio = 'Warning'"},
            "error": {"active": True, "sql": "prio = 'Error'"},
            "note": {"active": True, "sql": "prio = 'Note'"},
        }

    def on_mount(self):
        self.datatable = self.query_one(DataTable)
        self.datatable.focus()

        self.spinner = self.query_one(SpinnerWidget)
        self.info = self.query_one("#info", Label)
        self.search_text = self.query_one("#search", Input)
        self.days_to_display = self.query_one("#days", Input)

        self.info.display = False
        self.datatable.display = False

        self.update_datatable()

    @on(events.Key)
    def on_keypress(self, event: events.Key):
        # 1 = jump to first event, 2 = jump to last event, r = refresh
        if event.key == "1":
            self.datatable.move_cursor(row=0)
        elif event.key == "2":
            self.datatable.move_cursor(row=self.datatable.row_count - 1)
        elif event.key == "r":
            self.update_datatable()

    def compose(self) -> ComposeResult:
        yield TopBar(connection_status=self.connection_status, app_version=self.app_version, host=self.host)
        yield Label(
            "[b white]r[/b white] = refresh/[b white]1[/b white] = top of events/"
            "[b white]2[/b white] = bottom of events",
            id="help",
        )
        with Horizontal():
            switch_options = [("System", "system"), ("Warning", "warning"), ("Error", "error"), ("Note", "note")]
            for label, switch_id in switch_options:
                yield Label(label)
                yield Switch(animate=False, id=switch_id, value=True)
        with Horizontal(id="days_container", classes="input_container"):
            yield Label("Days to display")
            yield Input(id="days", value="30")
        with Horizontal(id="search_container", classes="input_container"):
            yield Label("Search event text")
            yield Input(id="search", placeholder="Specify event text to display")
        yield SpinnerWidget(id="spinner", text="Loading events")
        yield Label("", id="info")
        with Container():
            yield DataTable(show_cursor=False)

    @on(Input.Submitted, "Input")
    def event_search(self):
        self.update_datatable()

    @work(thread=True)
    def update_datatable(self):
        """Re-query the error log with the current filters and rebuild the table."""
        for switch in self.query(Switch):
            self.levels[switch.id]["active"] = switch.value

        # Verify days is a number
        try:
            int(self.days_to_display.value)
        except ValueError:
            self.datatable.display = False
            self.info.display = True
            self.info.update("[red]Days to display must be a number[/red]")
            return

        self.spinner.show()

        self.info.display = False
        self.datatable.display = False

        active_sql_list = [data["sql"] for data in self.levels.values() if data["active"]]
        where_clause = " OR ".join(active_sql_list)

        # Only attach the search filter when at least one level is active;
        # otherwise we would build an invalid "() AND (...)" clause
        if where_clause and self.search_text.value:
            # NOTE(review): user text is interpolated into SQL; escape
            # backslashes and single quotes so the LIKE pattern cannot break
            # out of the string literal. A parameterized query would be
            # preferable if the Database API supports it.
            search_text = self.search_text.value.replace("\\", "\\\\").replace("'", "''")
            where_clause = f"({where_clause}) AND (data LIKE '%{search_text}%')"

        self.datatable.clear(columns=True)
        self.datatable.add_column("Date/Time")
        self.datatable.add_column("Subsystem")
        self.datatable.add_column("Level")
        self.datatable.add_column("Code")

        if where_clause:
            query = MySQLQueries.error_log.replace("$1", f"AND ({where_clause})")
            query = query.replace("$2", f"AND logged > NOW() - INTERVAL {self.days_to_display.value} DAY")
            event_count = self.db_connection.execute(query)
            data = self.db_connection.fetchall()

            if data:
                self.datatable.add_column(f"Event ({event_count})")

                for row in data:
                    level_color = ""
                    if row["level"] == "Error":
                        level_color = "red"
                    elif row["level"] == "Warning":
                        level_color = "yellow"
                    elif row["level"] == "Note":
                        level_color = "dark_gray"

                    level = row["level"]
                    if level_color:
                        level = f"[{level_color}]{row['level']}[/{level_color}]"

                    timestamp = f"[#858A97]{row['timestamp'].strftime('%Y-%m-%d %H:%M:%S')}[/#858A97]"
                    error_code = f"[label]{row['error_code']}[/label]"
                    subsystem = row["subsystem"]

                    # Wrap the message to ~75% of console width so hopefully we don't get a scrollbar
                    wrapped_message = textwrap.wrap(row["message"], width=round(self.app.console.width * 0.75))
                    wrapped_message = "\n".join(wrapped_message)

                    # Row height must match the wrapped line count or the cell
                    # is clipped. BUGFIX: the old code iterated over the
                    # *characters* of the joined string, which capped the
                    # computed height at 2 regardless of actual line count.
                    height = wrapped_message.count("\n") + 1

                    self.datatable.add_row(timestamp, subsystem, level, error_code, wrapped_message, height=height)

                self.datatable.display = True
                self.datatable.focus()
            else:
                self.datatable.display = False
                self.info.display = True
                self.info.update("No events found")
        else:
            self.datatable.display = False
            self.info.display = True
            self.info.update("No switches selected. Toggle the switches above to filter what events you'd like to see")

        self.spinner.hide()
228 |
--------------------------------------------------------------------------------
/dolphie/Widgets/ProxySQLThreadScreen.py:
--------------------------------------------------------------------------------
1 | from rich.style import Style
2 | from textual.app import ComposeResult
3 | from textual.binding import Binding
4 | from textual.containers import Center, Container
5 | from textual.screen import Screen
6 | from textual.widgets import Label, Rule, Static, TextArea
7 | from textual.widgets.text_area import TextAreaTheme
8 |
9 | from dolphie.Widgets.TopBar import TopBar
10 |
11 |
class ProxySQLThreadScreen(Screen):
    """Read-only screen showing details for a single ProxySQL thread.

    Renders a pre-formatted thread table, an optional query section, and an
    optional JSON "extended information" viewer.
    """

    AUTO_FOCUS = ""

    CSS = """
    ProxySQLThreadScreen {
        background: #0a0e1b;

        & Container {
            height: auto;
        }

        & #thread_container {
            margin-top: 1;
            height: auto;
        }

        & .title {
            width: 100%;
            content-align: center middle;
            color: #bbc8e8;
            text-style: bold;
        }

        & .table {
            content-align: center middle;
            background: #101626;
            border: tall #1d253e;
            padding-left: 1;
            padding-right: 1;
            height: auto;
            width: auto;
        }

        & TextArea {
            border: tall #1d253e;
            width: 100;
            height: 35;
        }
    }
    """

    BINDINGS = [
        Binding("q", "app.pop_screen", "", show=False),
    ]

    def __init__(
        self,
        connection_status: str,
        app_version: str,
        host: str,
        thread_table: str,
        query: str,
        extended_info: str,
    ):
        """Store the pre-rendered content and prepare the JSON viewer."""
        super().__init__()

        self.connection_status = connection_status
        self.app_version = app_version
        self.host = host

        self.thread_table = thread_table
        self.formatted_query = query
        self.extended_info = extended_info

        # Re-skin the built-in dracula theme so the JSON viewer matches the
        # app's palette (mutates the shared built-in theme instance)
        theme = TextAreaTheme.get_builtin_theme("dracula")
        theme.base_style = Style(bgcolor="#101626")
        theme.gutter_style = Style(color="#606e88")
        theme.cursor_line_gutter_style = Style(color="#95a7c7", bgcolor="#20243b")
        theme.cursor_line_style = Style(bgcolor="#20243b")
        theme.selection_style = Style(bgcolor="#293c71")
        theme.cursor_style = Style(bgcolor="#7a8ab2", color="#121e3a")
        theme.syntax_styles = {
            "json.label": Style(color="#879bca", bold=True),
            "number": Style(color="#ca87a5"),
        }

        self.extended_info_text_area = TextArea(
            language="json", theme="dracula", show_line_numbers=True, read_only=True
        )
        if self.extended_info:
            self.extended_info_text_area.text = extended_info

    def on_mount(self):
        """Fill in the content and hide sections that have nothing to show."""
        self.query_one("#thread_table").update(self.thread_table)

        if not self.formatted_query:
            self.query_one("#query_container").display = False
        else:
            self.query_one("#query").update(self.formatted_query)

        if not self.extended_info:
            self.query_one("#extended_info_container").display = False

    def compose(self) -> ComposeResult:
        yield TopBar(connection_status=self.connection_status, app_version=self.app_version, host=self.host)

        with Container(id="thread_container"):
            yield Label("Thread Details", classes="title")
            yield Center(Static(id="thread_table", shrink=True, classes="table"))

        with Container(id="query_container"):
            yield Rule(line_style="heavy")
            yield Label("Query Details", classes="title")
            yield Center(Static(id="query", shrink=True, classes="table"))

        with Container(id="extended_info_container"):
            yield Rule(line_style="heavy")
            yield Label("Extended Information", classes="title")
            yield Center(self.extended_info_text_area)
121 |
--------------------------------------------------------------------------------
/dolphie/Widgets/SpinnerWidget.py:
--------------------------------------------------------------------------------
1 | from rich.spinner import Spinner
2 | from textual.widgets import Static
3 |
4 |
class SpinnerWidget(Static):
    """A Static widget that renders an animated "bouncingBar" spinner with a label.

    The spinner is redrawn on an interval timer while mounted; ``hide()`` and
    ``show()`` toggle visibility without stopping the widget.
    """

    def __init__(self, id, text):
        # Pass the id through the public Static constructor instead of
        # assigning Textual's private ``_id`` attribute directly
        super().__init__("", id=id)
        self._spinner = Spinner("bouncingBar", text=f"[label]{text}", speed=0.7)

    def on_mount(self) -> None:
        # Refresh the rendered spinner ~60 times per second
        self.update_render = self.set_interval(1 / 60, self.update_spinner)

    def hide(self) -> None:
        self.display = False

    def show(self) -> None:
        self.display = True

    def update_spinner(self) -> None:
        self.update(self._spinner)
22 |
--------------------------------------------------------------------------------
/dolphie/Widgets/ThreadScreen.py:
--------------------------------------------------------------------------------
1 | from rich.style import Style
2 | from rich.text import Text
3 | from textual.app import ComposeResult
4 | from textual.binding import Binding
5 | from textual.containers import Center, Container, ScrollableContainer
6 | from textual.screen import Screen
7 | from textual.widgets import (
8 | DataTable,
9 | Label,
10 | Rule,
11 | Static,
12 | TabbedContent,
13 | TabPane,
14 | TextArea,
15 | )
16 | from textual.widgets.text_area import TextAreaTheme
17 |
18 | from dolphie.Modules.Functions import format_number
19 | from dolphie.Widgets.TopBar import TopBar
20 |
21 |
22 | class ThreadScreen(Screen):
23 | CSS = """
24 | ThreadScreen {
25 | background: #0a0e1b;
26 |
27 | & #explain_table {
28 | margin-top: 1;
29 | background: #101626;
30 | border: tall #1d253e;
31 | overflow-x: auto;
32 | min-height: 5;
33 | max-height: 15;
34 | width: 100%;
35 | }
36 |
37 | & #explain_failure {
38 | margin-top: 1;
39 | max-width: 120;
40 | }
41 |
42 | & Container {
43 | height: auto;
44 | }
45 |
46 | & #thread_container {
47 | margin-top: 1;
48 | height: auto;
49 | layout: horizontal;
50 | }
51 |
52 | & .title {
53 | width: 100%;
54 | content-align: center middle;
55 | color: #bbc8e8;
56 | text-style: bold;
57 | }
58 |
59 | & Center {
60 | height: auto;
61 | }
62 |
63 | & #query {
64 | width: auto;
65 | }
66 |
67 | & .container > Center {
68 | layout: horizontal;
69 | }
70 |
71 | & ScrollableContainer {
72 | height: auto;
73 | width: 50vw;
74 | max-height: 15;
75 | }
76 |
77 | & .table {
78 | content-align: center middle;
79 | background: #101626;
80 | border: tall #1d253e;
81 | padding-left: 1;
82 | padding-right: 1;
83 | height: auto;
84 | }
85 |
86 | & TextArea {
87 | border: tall #1d253e;
88 | max-height: 25;
89 | }
90 | }
91 |
92 | """
93 |
94 | BINDINGS = [
95 | Binding("q", "app.pop_screen", "", show=False),
96 | ]
97 |
98 | def __init__(
99 | self,
100 | connection_status: str,
101 | app_version: str,
102 | host: str,
103 | thread_table: str,
104 | user_thread_attributes_table: str,
105 | query: str,
106 | explain_data: str,
107 | explain_json_data: str,
108 | explain_failure: str,
109 | transaction_history_table: str,
110 | ):
111 | super().__init__()
112 |
113 | self.connection_status = connection_status
114 | self.app_version = app_version
115 | self.host = host
116 |
117 | self.thread_table = thread_table
118 | self.user_thread_attributes_table = user_thread_attributes_table
119 | self.formatted_query = query
120 | self.explain_data = explain_data
121 | self.explain_json_data = explain_json_data
122 | self.explain_failure = explain_failure
123 | self.transaction_history_table = transaction_history_table
124 |
125 | dracula = TextAreaTheme.get_builtin_theme("dracula")
126 | dracula.base_style = Style(bgcolor="#101626")
127 | dracula.gutter_style = Style(color="#606e88")
128 | dracula.cursor_line_gutter_style = Style(color="#95a7c7", bgcolor="#20243b")
129 | dracula.cursor_line_style = Style(bgcolor="#20243b")
130 | dracula.selection_style = Style(bgcolor="#293c71")
131 | dracula.cursor_style = Style(bgcolor="#7a8ab2", color="#121e3a")
132 | dracula.syntax_styles = {
133 | "json.label": Style(color="#879bca", bold=True),
134 | "number": Style(color="#ca87a5"),
135 | }
136 |
137 | self.explain_json_text_area = TextArea(language="json", theme="dracula", show_line_numbers=True, read_only=True)
138 |
139 | def on_mount(self):
140 | self.query_one("#thread_table").update(self.thread_table)
141 | self.query_one("#query").update(self.formatted_query)
142 |
143 | if self.transaction_history_table:
144 | self.query_one("#transaction_history_table").update(self.transaction_history_table)
145 | else:
146 | self.query_one("#transaction_history_container").display = False
147 |
148 | if self.user_thread_attributes_table:
149 | self.query_one("#user_thread_attributes_table").update(self.user_thread_attributes_table)
150 | else:
151 | self.query_one("#user_thread_attributes_table").display = False
152 |
153 | if self.formatted_query:
154 | if self.explain_failure:
155 | self.query_one("#explain_tabbed_content").display = False
156 | self.query_one("#explain_failure").update(Text.from_markup(self.explain_failure))
157 | elif self.explain_data:
158 | self.query_one("#explain_failure").display = False
159 |
160 | explain_table = self.query_one("#explain_table", DataTable)
161 |
162 | columns = []
163 | for row in self.explain_data:
164 | values = []
165 | for column, value in row.items():
166 | # Exclude possbile_keys field since it takes up too much space
167 | if column == "possible_keys":
168 | continue
169 |
170 | # Don't duplicate columns
171 | if column not in columns:
172 | explain_table.add_column(f"[label]{column}")
173 | columns.append(column)
174 |
175 | if column == "key" and value is None:
176 | value = "[b white on #B30000]NO INDEX[/b white on #B30000]"
177 |
178 | if column == "rows":
179 | value = format_number(value)
180 |
181 | values.append(str(value))
182 |
183 | explain_table.add_row(*values)
184 | else:
185 | self.query_one("#explain_table").display = False
186 | self.query_one("#explain_failure").display = False
187 | else:
188 | self.query_one("#query_container").display = False
189 |
190 | if self.explain_json_data:
191 | self.explain_json_text_area.text = self.explain_json_data
192 | else:
193 | self.query_one("#explain_tabbed_content").display = False
194 |
195 | def compose(self) -> ComposeResult:
196 | yield TopBar(connection_status=self.connection_status, app_version=self.app_version, host=self.host)
197 |
198 | with Container(id="thread_container", classes="container"):
199 | with Container():
200 | yield Label("Thread Details", classes="title")
201 | yield ScrollableContainer(Static(id="thread_table"), classes="table")
202 | with Container():
203 | yield Label("Thread Attributes", classes="title")
204 | yield ScrollableContainer(Static(id="user_thread_attributes_table"), classes="table")
205 |
206 | with Container(id="query_container", classes="container"):
207 | yield Rule(line_style="heavy")
208 | yield Label("Query", classes="title")
209 | yield Center(Static(id="query", shrink=True, classes="table"))
210 |
211 | yield Center(Label("", id="explain_failure"))
212 | with TabbedContent(id="explain_tabbed_content", classes="container"):
213 | with TabPane("Table", id="table_explain_tab", classes="container"):
214 | yield DataTable(show_cursor=False, id="explain_table", classes="table")
215 |
216 | with TabPane("JSON", id="json_explain_tab", classes="container"):
217 | yield Center(self.explain_json_text_area)
218 |
219 | with Container(id="transaction_history_container", classes="container"):
220 | yield Rule(line_style="heavy")
221 | yield Label("Transaction History", id="transaction_history_label", classes="title")
222 | yield Center(
223 | Static(id="transaction_history_table", shrink=True, classes="table"),
224 | id="transaction_history_table_center",
225 | )
226 |
--------------------------------------------------------------------------------
/dolphie/Widgets/TopBar.py:
--------------------------------------------------------------------------------
1 | from rich.text import Text
2 | from textual.app import ComposeResult
3 | from textual.containers import Container
4 | from textual.reactive import reactive
5 | from textual.widgets import Label
6 |
7 | from dolphie.Modules.Functions import format_bytes
8 |
9 |
class TopBar(Container):
    """Bar shown at the top of every screen: app title, host/status, and help text.

    The host label is re-rendered whenever either reactive changes.
    """

    # Re-rendered on every assignment, even when the value is unchanged.
    host = reactive("", init=False, always_update=True)
    replay_file_size = reactive("", always_update=True)

    def __init__(
        self, connection_status="", app_version="", host="", help="press [b highlight]q[/b highlight] to return"
    ):
        super().__init__()

        self.app_title = Text.from_markup(f" :dolphin: [b light_blue]Dolphie[/b light_blue] [light_blue]v{app_version}")

        self.topbar_title = Label(self.app_title, id="topbar_title")
        self.topbar_host = Label("", id="topbar_host")
        self.topbar_help = Label(Text.from_markup(help), id="topbar_help")

        # Order matters: assigning the reactives below fires their watchers,
        # which read connection_status, so it must be set first.
        self.connection_status = connection_status
        self.host = host
        self.replay_file_size = None

    def _update_topbar_host(self):
        """Render "[status] host | RECORDING: size" into the host label."""
        if self.replay_file_size:
            recording_text = f"| [b recording]RECORDING[/b recording]: {format_bytes(self.replay_file_size)}"
        else:
            recording_text = ""

        if self.connection_status:
            markup = f"\\[[white]{self.connection_status}[/white]] {self.host} {recording_text}"
        else:
            markup = ""

        self.topbar_host.update(Text.from_markup(markup))

    def watch_replay_file_size(self):
        self._update_topbar_host()

    def watch_host(self):
        self._update_topbar_host()

    def compose(self) -> ComposeResult:
        yield self.topbar_title
        yield self.topbar_host
        yield self.topbar_help
53 |
--------------------------------------------------------------------------------
/examples/dolphie-daemon.cnf:
--------------------------------------------------------------------------------
1 | [dolphie]
2 | user = dolphie
3 | password = super_secret_password
4 | socket = /var/run/mysqld/mysqld.sock
5 |
6 | refresh_interval = 2
7 | replay_retention_hours = 48
8 |
9 | daemon_mode = true
10 | daemon_mode_log_file = /var/log/dolphie/daemon.log
11 | replay_dir = /var/lib/dolphie/replays
12 |
--------------------------------------------------------------------------------
/examples/dolphie.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Dolphie Daemon Service
3 | After=mysql.service
4 |
5 | [Service]
6 | User=dolphie
7 | ExecStart=/usr/local/bin/dolphie --config-file /etc/dolphie/dolphie-daemon.cnf
8 | StandardOutput=journal
9 | StandardError=journal
10 | Environment=PYTHONUNBUFFERED=1
11 | Restart=on-failure
12 |
13 | [Install]
14 | WantedBy=multi-user.target
15 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "dolphie"
3 | version = "6.10.2"
4 | license = "GPL-3.0-or-later"
5 | description = "Your single pane of glass for real-time analytics into MySQL/MariaDB & ProxySQL"
6 | authors = ["Charles Thompson <01charles.t@gmail.com>"]
7 | readme = "README.md"
8 |
9 | [tool.poetry.dependencies]
10 | python = "^3.8.1"
11 | rich = "^14.0.0"
12 | pymysql = "^1.1.1"
13 | myloginpath = "^0.0.4"
14 | packaging = "^25.0"
15 | requests = "^2.32.3"
16 | sqlparse = "^0.5.3"
17 | textual = {extras = ["syntax"], version = "^3.2.0"}
18 | plotext = "^5.3.2"
19 | zstandard = "^0.23.0"
20 | loguru = "^0.7.3"
21 | orjson = "^3.10.15"
22 | psutil = "^7.0.0"
23 |
24 | [build-system]
25 | requires = ["poetry-core"]
26 | build-backend = "poetry.core.masonry.api"
27 |
28 | [tool.poetry.scripts]
29 | dolphie = "dolphie.App:main"
30 |
--------------------------------------------------------------------------------