├── .gitignore ├── README.md ├── build_dsgen_tools.sh ├── calc_time.sh ├── create_tpcds_db_tbl.sh ├── generate_data.sh ├── load_data.sh ├── pyproject.toml ├── remove_last_pipe.sh ├── sys_pg_metrics_collector.py ├── system_resource_stressor.py ├── test_remove_last_pipe.bats ├── tools ├── ansi.tpl ├── db2.tpl ├── mysql.tpl ├── netezza.tpl ├── oracle.tpl ├── postgresql.tpl ├── sqlserver.tpl └── tpcds.sql ├── tpcds_data └── customer_1_4.dat ├── tpcds_metrics_data.csv ├── tpcds_metrics_data.png ├── tpcds_metrics_subplotter.py ├── tpcds_query └── query_0.sql └── update_query_templates.sh /.gitignore: -------------------------------------------------------------------------------- 1 | .idea* 2 | .DS_Store 3 | *.pyc 4 | **/__pycache__/ 5 | build/ 6 | *.egg-info/ 7 | uv.lock 8 |
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TPC-DS is a Decision Support Benchmark 2 | 3 | TPC-DS is a decision support benchmark that models several generally applicable aspects of a decision 4 | support system, including queries and data maintenance. 5 | 6 | ## How to run it 7 | 8 | ```sh 9 | $ tmux new -s tpcds_session 10 | $ export PGPASSWORD='pg-password' 11 | $ ( /usr/bin/time psql -U <db_user> -d <db_name> -h <db_host> -a -f <query_file.sql> ) >tpcds.log 2>&1 12 | ``` 13 | 14 | ### or 15 | 16 | ```sh 17 | $ nohup bash -c "export PGPASSWORD='pg-password'; /usr/bin/time psql -U <db_user> -d <db_name> -h <db_host> -a -f <query_file.sql>" >tpcds.log 2>&1 & 18 | ``` 19 | 20 | ## Install dependencies with uv (virtual environment for collecting DB performance metrics) 21 | 22 | ```sh 23 | $ uv venv --python 3.13.2 24 | $ uv sync 25 | ``` 26 | 27 | ## TPC-DS Metrics Data 28 | ![tpcds_metrics](https://github.com/binbjz/tpcds_pg/blob/master/tpcds_metrics_data.png) 29 |
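Once the environment is synced, the metrics collector and the plotting script that produce the chart above can be launched with `uv run`; a minimal sketch (paths, credentials, and log file names must be adapted to your own setup):

```sh
# Start the system/PG metrics collector in the background; it appends rows to tpcds_metrics_data.csv
$ nohup uv run python sys_pg_metrics_collector.py > sys_pg_metrics_collector.log 2>&1 &

# After the benchmark run finishes, render one subplot per metric from the CSV
$ uv run python tpcds_metrics_subplotter.py
```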
-------------------------------------------------------------------------------- /build_dsgen_tools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================== 4 | # Function: compile_dsgen_tools 5 | # Description: Compile the dsgen tools (for TPC-DS) 6 | # Run with: nohup /usr/bin/time /bin/bash build_dsgen_tools.sh >>./make_dsgen_tools.log 2>&1 & 7 | # ========================================================== 8 | compile_dsgen_tools() { 9 | local target_dir 10 | local dsgen_makefile 11 | 12 | target_dir="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/tools" 13 | dsgen_makefile="Makefile.suite" 14 | 15 | cd "$target_dir" || { 16 | echo "Failed to enter target directory." 17 | exit 1 18 | } 19 | 20 | [[ -f $dsgen_makefile ]] || { 21 | echo "$dsgen_makefile does not exist." 22 | exit 2 23 | } 24 | 25 | if make clean; then 26 | if make -f $dsgen_makefile OS=LINUX -j 2; then 27 | echo "Make succeeded." 28 | else 29 | echo "Make failed with $dsgen_makefile." 30 | exit 3 31 | fi 32 | else 33 | echo "Make clean failed." 34 | exit 3 35 | fi 36 | } 37 | 38 | compile_dsgen_tools 39 |
-------------------------------------------------------------------------------- /calc_time.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================== 4 | # Function: calculate_total_time 5 | # Description: Sum the total execution time of the TPC-DS query SQL 6 | # Parameter: $1 path of the log file to analyze 7 | # ========================================================== 8 | calculate_total_time() { 9 | local log_file="$1" 10 | local total_time=0 11 | 12 | total_time=$(awk '/Time:/ { split($2, a, " "); sum += a[1] } END { print sum }' "$log_file") 13 | echo "Total Time: $total_time ms" 14 | } 15 | 16 | log_file="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/tpcds_query/tpcds.log" 17 | calculate_total_time "$log_file" 18 |
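calc_time.sh sums every `Time:` line that psql writes into the log file hardcoded in the script, so it assumes timing output was enabled for the run (for example via `\timing` in psql or a `.psqlrc`). A small sketch of the input it expects and the output it prints, with illustrative values only:

```sh
# Lines of this shape in tpcds.log:
#   Time: 152.118 ms
#   Time: 87.904 ms
$ bash calc_time.sh
Total Time: 240.022 ms
```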
-------------------------------------------------------------------------------- /create_tpcds_db_tbl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================== 4 | # Function: create_database 5 | # Description: Create a new PostgreSQL database 6 | # Parameters: $1 database user, $2 database name 7 | # Returns: 0 on success, 1 on failure 8 | # ========================================================== 9 | create_database() { 10 | local db_user="$1" 11 | local db_name="$2" 12 | if createdb -U "$db_user" "$db_name"; then 13 | echo "Database $db_name created successfully." 14 | return 0 15 | else 16 | echo "Failed to create database $db_name." 17 | return 1 18 | fi 19 | } 20 | 21 | # ========================================================== 22 | # Function: execute_sql_file 23 | # Description: Execute a SQL file in the given database to create the tables 24 | # Parameters: $1 database user, $2 database name, $3 path to the SQL file 25 | # Returns: 0 on success, 1 on failure 26 | # ========================================================== 27 | execute_sql_file() { 28 | local db_user="$1" 29 | local db_name="$2" 30 | local sql_file="$3" 31 | if psql -U "$db_user" -d "$db_name" -a -f "$sql_file"; then 32 | echo "All tables created successfully." 33 | return 0 34 | else 35 | echo "Failed to create tables." 36 | return 1 37 | fi 38 | } 39 | 40 | # ========================================================== 41 | # Function: main 42 | # Description: Main entry point of the script 43 | # ========================================================== 44 | main() { 45 | local tools_dir="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/tools" 46 | local db_name="tpcds" 47 | local db_user="postgres" 48 | local db_pass="pg-auth" 49 | local sql_file="${tools_dir}/tpcds.sql" 50 | 51 | export PGPASSWORD="$db_pass" 52 | 53 | cd "$tools_dir" || { 54 | echo "Cannot enter directory $tools_dir" 55 | exit 1 56 | } 57 | 58 | if create_database "$db_user" "$db_name"; then 59 | execute_sql_file "$db_user" "$db_name" "$sql_file" 60 | else 61 | echo "Database creation failed, aborting and cleaning up the environment.." 62 | exit 1 63 | fi 64 | 65 | unset PGPASSWORD 66 | } 67 | 68 | main 69 |
-------------------------------------------------------------------------------- /generate_data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DSGEN_PATH="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1" 4 | 5 | # ========================================================== 6 | # Function: generate_dat_data 7 | # Description: Generate the TPC-DS test data 8 | # Verify the program is running correctly in the background: 9 | # ps -eo pid,user,%cpu,%mem,vsz,rss,tty,stat,start,time,etime,cmd | grep '[d]sdgen' 10 | # ========================================================== 11 | generate_dat_data() { 12 | local TOOLS_DIR=${1:-"${DSGEN_PATH}/tools"} 13 | local SCALE=${2:-10} 14 | local child 15 | 16 | cd "$TOOLS_DIR" || { 17 | echo "Cannot enter directory $TOOLS_DIR" 18 | exit 1 19 | } 20 | 21 | for child in {1..4}; do 22 | nohup /usr/bin/time ./dsdgen -scale "$SCALE" -dir ../tpcds_data/ \ 23 | -parallel 4 -child "${child}" >>../../tpcds_data.log 2>&1 & 24 | done 25 | } 26 | 27 | # ========================================================== 28 | # Function: generate_query_data 29 | # Description: Generate the TPC-DS query SQL 30 | # Verify the program is running correctly in the background: 31 | # ps -eo pid,user,%cpu,%mem,vsz,rss,tty,stat,start,time,etime,cmd | grep '[d]sqgen' 32 | # ========================================================== 33 | generate_query_data() { 34 | local TPC_DIR=${DSGEN_PATH} 35 | 36 | cd "$TPC_DIR/tools" || { 37 | echo "Cannot enter directory $TPC_DIR/tools" 38 | exit 1 39 | } 40 | 41 | nohup /usr/bin/time ./dsqgen -output_dir ../tpcds_query/ \ 42 | -input ../query_templates/templates.lst -scale 1 -dialect postgresql \ 43 | -directory ../query_templates/ >>../../tpcds_query.log 2>&1 & 44 | } 45 | 46 | generate_dat_data "${DSGEN_PATH}/tools" 10 47 | generate_query_data 48 |
-------------------------------------------------------------------------------- /load_data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DB_NAME="tpcds" 4 | DB_USER="postgres" 5 | DB_PASS="pg-auth" 6 | 7 | DAT_DIR="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/tpcds_data" 8 | MAX_JOBS=20 9 | 10 | # ========================================================== 11 | # Function: load_data_to_table 12 | # Description: Wrap the data-loading command and run the COPY command 13 | # Note: password handling is simplified here; a real deployment should handle passwords more securely 14 | # Run with: nohup /usr/bin/time /bin/bash load_data.sh >>./tpcds_load_data.log 2>&1 & 15 | # ========================================================== 16 | load_data_to_table() { 17 | local dat_file=$1 18 | local table_name 19 | table_name=$(basename "$dat_file" .dat | awk -F'_' '{s=$1; for (i=2; i=0.30.0", 9 | "loguru>=0.7.3", 10 | "matplotlib>=3.10.0", 11 | "numpy>=2.2.3", 12 | "pandas>=2.2.3", 13 | "psutil>=7.0.0", 14 | "pyarrow>=19.0.0", 15 | ] 16 |
-------------------------------------------------------------------------------- /remove_last_pipe.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================== 4 | # 函数: process_dat_file 5 | # 描述: 处理单个 .dat 文件,删除每行最后的 "|" 字符 6 | # 参数: $1 .dat 文件的路径 7 | # ps -eo pid,user,%cpu,%mem,vsz,rss,tty,stat,start,time,etime,cmd | grep '[/]usr/bin/time' 8 | # bash remove_last_pipe.sh & 9 | # ========================================================== 10 | process_dat_file() { 11 | local dat_file="$1" 12 | nohup /usr/bin/time /bin/bash -c "sed 's/|$//' $dat_file > ${dat_file}.tmp && \ 13 | mv ${dat_file}.tmp $dat_file && \ 14 | echo \"已处理
$dat_file 文件,删除了每一行最后的 | 字符..\"" >>./tpcds_remove_last_pipe.log 2>&1 & 15 | } 16 | 17 | # ========================================================== 18 | # 函数: process_dat_files 19 | # 描述: 并发地处理指定的测试数据目录中的所有 .dat 文件 20 | # 参数: $1 指定的目录路径 21 | # ========================================================== 22 | process_dat_files() { 23 | local dat_dir="$1" 24 | local dat_file 25 | local count=0 26 | 27 | [[ -d "$dat_dir" ]] || { 28 | echo "指定的目录不存在: $dat_dir" 29 | return 1 30 | } 31 | 32 | for dat_file in "$dat_dir"/*.dat; do 33 | process_dat_file "$dat_file" 34 | # ((count++)) 35 | count=$((count + 1)) 36 | 37 | if ((count % 20 == 0)); then 38 | wait -n 39 | fi 40 | done 41 | wait 42 | return 0 43 | } 44 | 45 | # ========================================================== 46 | # 函数: main 47 | # 描述: 脚本的主执行函数 48 | # ========================================================== 49 | main() { 50 | local dat_dir 51 | dat_dir="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/tpcds_data" 52 | 53 | process_dat_files "$dat_dir" || { 54 | echo "处理 .dat 文件失败.." 55 | exit 1 56 | } 57 | } 58 | 59 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then 60 | main 61 | fi 62 | -------------------------------------------------------------------------------- /sys_pg_metrics_collector.py: -------------------------------------------------------------------------------- 1 | """ 2 | 在虚拟环境中安装依赖库,收集PG性能指标: 3 | $ python -m venv metrics_venv 4 | $ cd metrics_venv/ && source bin/activate 5 | $ pip install asyncpg pandas pyarrow psutil loguru 6 | 7 | $ nohup /usr/bin/time python sys_pg_metrics_collector.py & 8 | 或者自定义输出文件名 9 | $ nohup /usr/bin/time python sys_pg_metrics_collector.py > sys_pg_metrics_collector.log 2>&1 & 10 | 11 | # 查看执行进程 12 | $ ps -eo pid,user,pcpu,pmem,vsz,rss,tty,stat,start,time,etime,cmd | grep '[s]ys_pg_metrics_collector.py' 13 | """ 14 | import asyncio 15 | import asyncpg 16 | import psutil 17 | import pandas as pd 18 | from collections import Counter 19 | from pathlib import Path 20 | from asyncpg.pool import PoolConnectionProxy 21 | from loguru import logger 22 | 23 | 24 | class CSVBuffer: 25 | """ 26 | 更高效地将数据写入到一个单一的 CSV 文件中,而不是每次循环都创建一个新的文件。 27 | 同时,通过缓存和批量写入,写入性能也会得到提升。 28 | """ 29 | 30 | def __init__(self, filename: str, buffer_size: int = 1000): 31 | self.buffer = [] 32 | self.filename = filename 33 | self.buffer_size = buffer_size 34 | self.header_written = Path(filename).exists() 35 | 36 | def append(self, data: dict): 37 | self.buffer.append(data) 38 | if len(self.buffer) >= self.buffer_size: 39 | self.flush() 40 | 41 | def flush(self): 42 | if self.buffer: 43 | df = pd.DataFrame(self.buffer) 44 | if self.header_written: 45 | df.to_csv(self.filename, mode="a", header=False, index=False) 46 | else: 47 | df.to_csv(self.filename, mode="w", header=True, index=False) 48 | self.header_written = True 49 | self.buffer = [] 50 | 51 | 52 | async def collect_system_metrics(): 53 | """ 54 | 在异步环境中运行阻塞性代码,收集系统性能指标。 55 | :return: 返回一个包含各种系统性能指标的字典。实际返回的是一个等待对象 56 | """ 57 | 58 | def collect_metrics_blocking(): 59 | """ 60 | 收集系统级别指标 61 | :return: 返回一个包含各种系统性能指标的字典 62 | """ 63 | cpu_info = psutil.cpu_times_percent(interval=None) 64 | memory_info = psutil.virtual_memory() 65 | io_info = psutil.disk_io_counters() 66 | return { 67 | "cpu_user": cpu_info.user, 68 | "cpu_system": cpu_info.system, 69 | "memory_used": memory_info.used, 70 | "memory_free": memory_info.free, 71 | "io_read": io_info.read_count, 72 | "io_write": io_info.write_count 73 | } 74 | 75 | loop = asyncio.get_running_loop() 76 
| return await loop.run_in_executor(None, collect_metrics_blocking) 77 | 78 | 79 | async def collect_pg_metrics(conn: PoolConnectionProxy): 80 | """ 81 | 异步地收集PG内部性能指标。 82 | :param conn: 数据库连接 83 | :return: 返回一个包含各种PG性能指标的字典 84 | """ 85 | metrics = {} 86 | 87 | # 收集会话数 88 | session_count = await conn.fetchval("SELECT count(*) FROM pg_stat_activity;") 89 | metrics["pg_session_count"] = session_count 90 | 91 | # 收集数据库级别统计信息 92 | db_stat = await conn.fetchrow("SELECT datname, xact_commit, xact_rollback FROM pg_stat_database " 93 | "WHERE datname = 'tpcds' LIMIT 1;") 94 | metrics["pg_db_name"] = db_stat["datname"] 95 | metrics["pg_xact_commit"] = db_stat["xact_commit"] 96 | metrics["pg_xact_rollback"] = db_stat["xact_rollback"] 97 | 98 | # 收集后台写入器统计信息 99 | bgwriter_stat = await conn.fetchrow("SELECT buffers_alloc, buffers_backend FROM pg_stat_bgwriter;") 100 | metrics["pg_buffers_alloc"] = bgwriter_stat["buffers_alloc"] 101 | metrics["pg_buffers_backend"] = bgwriter_stat["buffers_backend"] 102 | 103 | # 收集磁盘 I/O 操作 104 | disk_io_stat = await conn.fetchrow("SELECT relname, heap_blks_read, heap_blks_hit FROM " 105 | "pg_statio_user_tables LIMIT 1;") 106 | metrics["pg_disk_io_table_name"] = disk_io_stat["relname"] 107 | metrics["pg_heap_blks_read"] = disk_io_stat["heap_blks_read"] 108 | metrics["pg_heap_blks_hit"] = disk_io_stat["heap_blks_hit"] 109 | 110 | # 收集缓存命中率 111 | cache_hit_ratio = await conn.fetchval("SELECT sum(heap_blks_hit) / (sum(heap_blks_hit) + " 112 | "sum(heap_blks_read)) as ratio FROM pg_statio_user_tables;") 113 | metrics["pg_cache_hit_ratio"] = cache_hit_ratio 114 | 115 | # 收集长时间运行的查询数量 116 | long_running_queries = await conn.fetchval("SELECT count(*) FROM pg_stat_activity WHERE state != 'idle' " 117 | "AND now() - pg_stat_activity.query_start > interval '5 minutes';") 118 | metrics["pg_long_running_queries"] = long_running_queries 119 | 120 | # 收集未授权的锁数量 121 | ungranted_locks = await conn.fetchval("SELECT count(*) FROM pg_locks WHERE granted = false;") 122 | metrics["pg_ungranted_locks"] = ungranted_locks 123 | 124 | return metrics 125 | 126 | 127 | def append_average_to_csv(filename: str): 128 | """ 129 | 读取CSV文件,计算每一列的平均值,并将平均值添加到CSV文件的最后一行。 130 | :param filename: CSV文件名 131 | """ 132 | try: 133 | if Path(filename).exists(): 134 | df = pd.read_csv(filename) 135 | avg_values_all = pd.Series(dtype="object", index=df.columns) 136 | 137 | for col in df.columns: 138 | if pd.api.types.is_numeric_dtype(df[col]): 139 | avg_values_all[col] = df[col].mean() 140 | elif pd.api.types.is_string_dtype(df[col]): 141 | most_common = Counter(df[col].dropna()).most_common(1) 142 | if most_common: 143 | avg_values_all[col] = most_common[0][0] 144 | else: 145 | avg_values_all[col] = "N/A" 146 | df_avg = pd.DataFrame([avg_values_all]) 147 | df_avg.to_csv(filename, mode="a", header=False, index=False) 148 | logger.info("平均值(或其他适当的值)已成功写入到CSV文件..") 149 | else: 150 | logger.warning(f"{filename} 文件不存在,无法计算和写入平均值..") 151 | except Exception as e: 152 | logger.error(f"读取CSV文件或写入平均值时出错: {e}") 153 | 154 | 155 | def check_process_running(process_name: str): 156 | """ 157 | 检查是否有一个名为 process_name 的进程是否正在运行。 158 | """ 159 | for proc in psutil.process_iter(): 160 | try: 161 | proc_info = proc.as_dict(attrs=["pid", "name", "cmdline"]) 162 | if process_name.lower() in " ".join(proc_info["cmdline"]).lower(): 163 | return True 164 | except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess): 165 | pass 166 | return False 167 | 168 | 169 | async def main(csv_file: str): 170 | interval_time = 60 171 
| monitored_file = "query_0.sql" 172 | 173 | pool_pg = await asyncpg.create_pool( 174 | host="localhost", 175 | port=5432, 176 | user="postgres", 177 | password="pg-auth", 178 | database="tpcds" 179 | ) 180 | 181 | csv_buffer = CSVBuffer(csv_file) 182 | 183 | try: 184 | while True: 185 | if not check_process_running(monitored_file): 186 | logger.info(f"{monitored_file}不再运行,程序即将退出..") 187 | break 188 | 189 | async with pool_pg.acquire() as conn: 190 | system_metrics = await collect_system_metrics() 191 | pg_metrics = await collect_pg_metrics(conn) 192 | all_metrics = {**system_metrics, **pg_metrics} 193 | csv_buffer.append(all_metrics) 194 | await asyncio.sleep(interval_time) 195 | except Exception as e: 196 | logger.error(f"发生错误: {e}") 197 | finally: 198 | if pool_pg: 199 | await pool_pg.close() 200 | csv_buffer.flush() 201 | append_average_to_csv(csv_file) 202 | 203 | 204 | if __name__ == "__main__": 205 | _csv_file = "tpcds_metrics_data.csv" 206 | asyncio.run(main(_csv_file)) 207 | -------------------------------------------------------------------------------- /system_resource_stressor.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | import multiprocessing 4 | import numpy as np 5 | 6 | 7 | def stress_cpu_core(): 8 | """ 9 | 打满单个CPU核心 10 | """ 11 | try: 12 | while True: 13 | a = np.random.rand(500, 500) 14 | b = np.random.rand(500, 500) 15 | c = np.dot(a, b) 16 | except KeyboardInterrupt: 17 | print("CPU压力测试被终止在单核上.") 18 | 19 | 20 | def stress_cpu(): 21 | """ 22 | 打满所有可用的CPU核心 23 | pkill -f "python.*server_stress.py" 24 | """ 25 | try: 26 | num_cores = os.cpu_count() 27 | print(f"开始在 {num_cores} 个核心上打满CPU...") 28 | 29 | threads = [] 30 | for _ in range(num_cores): 31 | t = threading.Thread(target=stress_cpu_core) 32 | t.start() 33 | threads.append(t) 34 | 35 | for t in threads: 36 | t.join() 37 | except KeyboardInterrupt: 38 | print("CPU压力测试被终止.") 39 | 40 | 41 | def stress_memory(): 42 | """ 43 | 打满内存 44 | """ 45 | memory_fill = [] 46 | try: 47 | print("开始打满内存...") 48 | while True: 49 | memory_fill.append(np.zeros((9000, 9000), dtype=np.float64)) 50 | except MemoryError: 51 | print("内存不足,清空数组以避免崩溃..") 52 | memory_fill.clear() 53 | except KeyboardInterrupt: 54 | print("内存压力测试被终止.") 55 | 56 | 57 | if __name__ == "__main__": 58 | try: 59 | choice = input("请选择要执行的操作(1: 打满CPU, 2: 打满内存):") 60 | 61 | if choice == "1": 62 | for _ in range(multiprocessing.cpu_count()): 63 | p = multiprocessing.Process(target=stress_cpu) 64 | p.start() 65 | elif choice == "2": 66 | stress_memory() 67 | else: 68 | print("无效的选择.") 69 | except KeyboardInterrupt: 70 | print("程序被用户终止.") 71 | 72 | -------------------------------------------------------------------------------- /test_remove_last_pipe.bats: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bats 2 | # ========================================================== 3 | # test_remove_last_pipe_opt.bats 4 | # ✓ 测试 process_dat_file 函数删除每行最后的 '|' 字符 5 | # ✓ 测试 process_dat_files 函数处理目录中的所有 .dat 文件 6 | # 7 | # 2 tests, 0 failures 8 | # ========================================================== 9 | load "remove_last_pipe.sh" 10 | 11 | setup() { 12 | TEST_DIR=$(mktemp -d) 13 | } 14 | 15 | teardown() { 16 | rm -rf "$TEST_DIR" 17 | } 18 | 19 | @test "测试 process_dat_file 函数删除每行最后的 '|' 字符" { 20 | # 在这个测试的独立环境中创建测试文件 21 | echo "Sample line 1|" >"$TEST_DIR/sample1.dat" 22 | 23 | process_dat_file "$TEST_DIR/sample1.dat" 24 | wait 25 | 26 | result=$(cat 
"$TEST_DIR/sample1.dat") 27 | [ "$result" = "Sample line 1" ] 28 | } 29 | 30 | @test "测试 process_dat_files 函数处理目录中的所有 .dat 文件" { 31 | # 在这个测试的独立环境中创建超过20个测试文件来触发 wait -n 逻辑 32 | for i in $(seq 1 25); do 33 | echo "Sample line $i|" >"$TEST_DIR/sample$i.dat" 34 | done 35 | 36 | process_dat_files "$TEST_DIR" 37 | wait 38 | 39 | for i in $(seq 1 25); do 40 | result=$(cat "$TEST_DIR/sample$i.dat") 41 | [ "$result" = "Sample line $i" ] || false 42 | done 43 | } 44 | 45 | -------------------------------------------------------------------------------- /tools/ansi.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = "top %d"; 37 | define __LIMITC = ""; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/db2.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 
10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = ""; 37 | define __LIMITC = " fetch first %d rows only"; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/mysql.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 
24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = "LIMIT %d"; 37 | define __LIMITC = ""; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/netezza.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 
32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = ""; 37 | define __LIMITC = "limit %d"; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/oracle.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = "select * from ("; 36 | define __LIMITB = ""; 37 | define __LIMITC = " ) where rownum <= %d"; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/postgresql.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 
10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = ""; 37 | define __LIMITC = "LIMIT %d"; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/sqlserver.tpl: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 
24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 32 | -- 33 | -- Contributors: 34 | -- 35 | define __LIMITA = ""; 36 | define __LIMITB = "top %d"; 37 | define __LIMITC = ""; 38 | 39 | define _BEGIN = "-- start query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 40 | define _END = "-- end query " + [_QUERY] + " in stream " + [_STREAM] + " using template " + [_TEMPLATE]; 41 | -------------------------------------------------------------------------------- /tools/tpcds.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Legal Notice 3 | -- 4 | -- This document and associated source code (the "Work") is a part of a 5 | -- benchmark specification maintained by the TPC. 6 | -- 7 | -- The TPC reserves all right, title, and interest to the Work as provided 8 | -- under U.S. and international laws, including without limitation all patent 9 | -- and trademark rights therein. 10 | -- 11 | -- No Warranty 12 | -- 13 | -- 1.1 TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE INFORMATION 14 | -- CONTAINED HEREIN IS PROVIDED "AS IS" AND WITH ALL FAULTS, AND THE 15 | -- AUTHORS AND DEVELOPERS OF THE WORK HEREBY DISCLAIM ALL OTHER 16 | -- WARRANTIES AND CONDITIONS, EITHER EXPRESS, IMPLIED OR STATUTORY, 17 | -- INCLUDING, BUT NOT LIMITED TO, ANY (IF ANY) IMPLIED WARRANTIES, 18 | -- DUTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A PARTICULAR 19 | -- PURPOSE, OF ACCURACY OR COMPLETENESS OF RESPONSES, OF RESULTS, OF 20 | -- WORKMANLIKE EFFORT, OF LACK OF VIRUSES, AND OF LACK OF NEGLIGENCE. 21 | -- ALSO, THERE IS NO WARRANTY OR CONDITION OF TITLE, QUIET ENJOYMENT, 22 | -- QUIET POSSESSION, CORRESPONDENCE TO DESCRIPTION OR NON-INFRINGEMENT 23 | -- WITH REGARD TO THE WORK. 24 | -- 1.2 IN NO EVENT WILL ANY AUTHOR OR DEVELOPER OF THE WORK BE LIABLE TO 25 | -- ANY OTHER PARTY FOR ANY DAMAGES, INCLUDING BUT NOT LIMITED TO THE 26 | -- COST OF PROCURING SUBSTITUTE GOODS OR SERVICES, LOST PROFITS, LOSS 27 | -- OF USE, LOSS OF DATA, OR ANY INCIDENTAL, CONSEQUENTIAL, DIRECT, 28 | -- INDIRECT, OR SPECIAL DAMAGES WHETHER UNDER CONTRACT, TORT, WARRANTY, 29 | -- OR OTHERWISE, ARISING IN ANY WAY OUT OF THIS OR ANY OTHER AGREEMENT 30 | -- RELATING TO THE WORK, WHETHER OR NOT SUCH AUTHOR OR DEVELOPER HAD 31 | -- ADVANCE NOTICE OF THE POSSIBILITY OF SUCH DAMAGES. 
32 | -- 33 | -- Contributors: 34 | -- Gradient Systems 35 | -- 36 | create table dbgen_version 37 | ( 38 | dv_version varchar(16), 39 | dv_create_date date, 40 | dv_create_time time, 41 | dv_cmdline_args varchar(200) 42 | ); 43 | 44 | create table customer_address 45 | ( 46 | ca_address_sk integer not null, 47 | ca_address_id char(16) not null, 48 | ca_street_number char(10), 49 | ca_street_name varchar(60), 50 | ca_street_type char(15), 51 | ca_suite_number char(10), 52 | ca_city varchar(60), 53 | ca_county varchar(30), 54 | ca_state char(2), 55 | ca_zip char(10), 56 | ca_country varchar(20), 57 | ca_gmt_offset decimal(5, 2), 58 | ca_location_type char(20), 59 | primary key (ca_address_sk) 60 | ); 61 | 62 | create table customer_demographics 63 | ( 64 | cd_demo_sk integer not null, 65 | cd_gender char(1), 66 | cd_marital_status char(1), 67 | cd_education_status char(20), 68 | cd_purchase_estimate integer, 69 | cd_credit_rating char(10), 70 | cd_dep_count integer, 71 | cd_dep_employed_count integer, 72 | cd_dep_college_count integer, 73 | primary key (cd_demo_sk) 74 | ); 75 | 76 | create table date_dim 77 | ( 78 | d_date_sk integer not null, 79 | d_date_id char(16) not null, 80 | d_date date, 81 | d_month_seq integer, 82 | d_week_seq integer, 83 | d_quarter_seq integer, 84 | d_year integer, 85 | d_dow integer, 86 | d_moy integer, 87 | d_dom integer, 88 | d_qoy integer, 89 | d_fy_year integer, 90 | d_fy_quarter_seq integer, 91 | d_fy_week_seq integer, 92 | d_day_name char(9), 93 | d_quarter_name char(6), 94 | d_holiday char(1), 95 | d_weekend char(1), 96 | d_following_holiday char(1), 97 | d_first_dom integer, 98 | d_last_dom integer, 99 | d_same_day_ly integer, 100 | d_same_day_lq integer, 101 | d_current_day char(1), 102 | d_current_week char(1), 103 | d_current_month char(1), 104 | d_current_quarter char(1), 105 | d_current_year char(1), 106 | primary key (d_date_sk) 107 | ); 108 | 109 | create table warehouse 110 | ( 111 | w_warehouse_sk integer not null, 112 | w_warehouse_id char(16) not null, 113 | w_warehouse_name varchar(20), 114 | w_warehouse_sq_ft integer, 115 | w_street_number char(10), 116 | w_street_name varchar(60), 117 | w_street_type char(15), 118 | w_suite_number char(10), 119 | w_city varchar(60), 120 | w_county varchar(30), 121 | w_state char(2), 122 | w_zip char(10), 123 | w_country varchar(20), 124 | w_gmt_offset decimal(5, 2), 125 | primary key (w_warehouse_sk) 126 | ); 127 | 128 | create table ship_mode 129 | ( 130 | sm_ship_mode_sk integer not null, 131 | sm_ship_mode_id char(16) not null, 132 | sm_type char(30), 133 | sm_code char(10), 134 | sm_carrier char(20), 135 | sm_contract char(20), 136 | primary key (sm_ship_mode_sk) 137 | ); 138 | 139 | create table time_dim 140 | ( 141 | t_time_sk integer not null, 142 | t_time_id char(16) not null, 143 | t_time integer, 144 | t_hour integer, 145 | t_minute integer, 146 | t_second integer, 147 | t_am_pm char(2), 148 | t_shift char(20), 149 | t_sub_shift char(20), 150 | t_meal_time char(20), 151 | primary key (t_time_sk) 152 | ); 153 | 154 | create table reason 155 | ( 156 | r_reason_sk integer not null, 157 | r_reason_id char(16) not null, 158 | r_reason_desc char(100), 159 | primary key (r_reason_sk) 160 | ); 161 | 162 | create table income_band 163 | ( 164 | ib_income_band_sk integer not null, 165 | ib_lower_bound integer, 166 | ib_upper_bound integer, 167 | primary key (ib_income_band_sk) 168 | ); 169 | 170 | create table item 171 | ( 172 | i_item_sk integer not null, 173 | i_item_id char(16) not null, 174 | 
i_rec_start_date date, 175 | i_rec_end_date date, 176 | i_item_desc varchar(200), 177 | i_current_price decimal(7, 2), 178 | i_wholesale_cost decimal(7, 2), 179 | i_brand_id integer, 180 | i_brand char(50), 181 | i_class_id integer, 182 | i_class char(50), 183 | i_category_id integer, 184 | i_category char(50), 185 | i_manufact_id integer, 186 | i_manufact char(50), 187 | i_size char(20), 188 | i_formulation char(20), 189 | i_color char(20), 190 | i_units char(10), 191 | i_container char(10), 192 | i_manager_id integer, 193 | i_product_name char(50), 194 | primary key (i_item_sk) 195 | ); 196 | 197 | create table store 198 | ( 199 | s_store_sk integer not null, 200 | s_store_id char(16) not null, 201 | s_rec_start_date date, 202 | s_rec_end_date date, 203 | s_closed_date_sk integer, 204 | s_store_name varchar(50), 205 | s_number_employees integer, 206 | s_floor_space integer, 207 | s_hours char(20), 208 | s_manager varchar(40), 209 | s_market_id integer, 210 | s_geography_class varchar(100), 211 | s_market_desc varchar(100), 212 | s_market_manager varchar(40), 213 | s_division_id integer, 214 | s_division_name varchar(50), 215 | s_company_id integer, 216 | s_company_name varchar(50), 217 | s_street_number varchar(10), 218 | s_street_name varchar(60), 219 | s_street_type char(15), 220 | s_suite_number char(10), 221 | s_city varchar(60), 222 | s_county varchar(30), 223 | s_state char(2), 224 | s_zip char(10), 225 | s_country varchar(20), 226 | s_gmt_offset decimal(5, 2), 227 | s_tax_precentage decimal(5, 2), 228 | primary key (s_store_sk) 229 | ); 230 | 231 | create table call_center 232 | ( 233 | cc_call_center_sk integer not null, 234 | cc_call_center_id char(16) not null, 235 | cc_rec_start_date date, 236 | cc_rec_end_date date, 237 | cc_closed_date_sk integer, 238 | cc_open_date_sk integer, 239 | cc_name varchar(50), 240 | cc_class varchar(50), 241 | cc_employees integer, 242 | cc_sq_ft integer, 243 | cc_hours char(20), 244 | cc_manager varchar(40), 245 | cc_mkt_id integer, 246 | cc_mkt_class char(50), 247 | cc_mkt_desc varchar(100), 248 | cc_market_manager varchar(40), 249 | cc_division integer, 250 | cc_division_name varchar(50), 251 | cc_company integer, 252 | cc_company_name char(50), 253 | cc_street_number char(10), 254 | cc_street_name varchar(60), 255 | cc_street_type char(15), 256 | cc_suite_number char(10), 257 | cc_city varchar(60), 258 | cc_county varchar(30), 259 | cc_state char(2), 260 | cc_zip char(10), 261 | cc_country varchar(20), 262 | cc_gmt_offset decimal(5, 2), 263 | cc_tax_percentage decimal(5, 2), 264 | primary key (cc_call_center_sk) 265 | ); 266 | 267 | create table customer 268 | ( 269 | c_customer_sk integer not null, 270 | c_customer_id char(16) not null, 271 | c_current_cdemo_sk integer, 272 | c_current_hdemo_sk integer, 273 | c_current_addr_sk integer, 274 | c_first_shipto_date_sk integer, 275 | c_first_sales_date_sk integer, 276 | c_salutation char(10), 277 | c_first_name char(20), 278 | c_last_name char(30), 279 | c_preferred_cust_flag char(1), 280 | c_birth_day integer, 281 | c_birth_month integer, 282 | c_birth_year integer, 283 | c_birth_country varchar(20), 284 | c_login char(13), 285 | c_email_address char(50), 286 | c_last_review_date char(10), 287 | primary key (c_customer_sk) 288 | ); 289 | 290 | create table web_site 291 | ( 292 | web_site_sk integer not null, 293 | web_site_id char(16) not null, 294 | web_rec_start_date date, 295 | web_rec_end_date date, 296 | web_name varchar(50), 297 | web_open_date_sk integer, 298 | web_close_date_sk integer, 
299 | web_class varchar(50), 300 | web_manager varchar(40), 301 | web_mkt_id integer, 302 | web_mkt_class varchar(50), 303 | web_mkt_desc varchar(100), 304 | web_market_manager varchar(40), 305 | web_company_id integer, 306 | web_company_name char(50), 307 | web_street_number char(10), 308 | web_street_name varchar(60), 309 | web_street_type char(15), 310 | web_suite_number char(10), 311 | web_city varchar(60), 312 | web_county varchar(30), 313 | web_state char(2), 314 | web_zip char(10), 315 | web_country varchar(20), 316 | web_gmt_offset decimal(5, 2), 317 | web_tax_percentage decimal(5, 2), 318 | primary key (web_site_sk) 319 | ); 320 | 321 | create table store_returns 322 | ( 323 | sr_returned_date_sk integer, 324 | sr_return_time_sk integer, 325 | sr_item_sk integer not null, 326 | sr_customer_sk integer, 327 | sr_cdemo_sk integer, 328 | sr_hdemo_sk integer, 329 | sr_addr_sk integer, 330 | sr_store_sk integer, 331 | sr_reason_sk integer, 332 | sr_ticket_number integer not null, 333 | sr_return_quantity integer, 334 | sr_return_amt decimal(7, 2), 335 | sr_return_tax decimal(7, 2), 336 | sr_return_amt_inc_tax decimal(7, 2), 337 | sr_fee decimal(7, 2), 338 | sr_return_ship_cost decimal(7, 2), 339 | sr_refunded_cash decimal(7, 2), 340 | sr_reversed_charge decimal(7, 2), 341 | sr_store_credit decimal(7, 2), 342 | sr_net_loss decimal(7, 2), 343 | primary key (sr_item_sk, sr_ticket_number) 344 | ); 345 | 346 | create table household_demographics 347 | ( 348 | hd_demo_sk integer not null, 349 | hd_income_band_sk integer, 350 | hd_buy_potential char(15), 351 | hd_dep_count integer, 352 | hd_vehicle_count integer, 353 | primary key (hd_demo_sk) 354 | ); 355 | 356 | create table web_page 357 | ( 358 | wp_web_page_sk integer not null, 359 | wp_web_page_id char(16) not null, 360 | wp_rec_start_date date, 361 | wp_rec_end_date date, 362 | wp_creation_date_sk integer, 363 | wp_access_date_sk integer, 364 | wp_autogen_flag char(1), 365 | wp_customer_sk integer, 366 | wp_url varchar(100), 367 | wp_type char(50), 368 | wp_char_count integer, 369 | wp_link_count integer, 370 | wp_image_count integer, 371 | wp_max_ad_count integer, 372 | primary key (wp_web_page_sk) 373 | ); 374 | 375 | create table promotion 376 | ( 377 | p_promo_sk integer not null, 378 | p_promo_id char(16) not null, 379 | p_start_date_sk integer, 380 | p_end_date_sk integer, 381 | p_item_sk integer, 382 | p_cost decimal(15, 2), 383 | p_response_target integer, 384 | p_promo_name char(50), 385 | p_channel_dmail char(1), 386 | p_channel_email char(1), 387 | p_channel_catalog char(1), 388 | p_channel_tv char(1), 389 | p_channel_radio char(1), 390 | p_channel_press char(1), 391 | p_channel_event char(1), 392 | p_channel_demo char(1), 393 | p_channel_details varchar(100), 394 | p_purpose char(15), 395 | p_discount_active char(1), 396 | primary key (p_promo_sk) 397 | ); 398 | 399 | create table catalog_page 400 | ( 401 | cp_catalog_page_sk integer not null, 402 | cp_catalog_page_id char(16) not null, 403 | cp_start_date_sk integer, 404 | cp_end_date_sk integer, 405 | cp_department varchar(50), 406 | cp_catalog_number integer, 407 | cp_catalog_page_number integer, 408 | cp_description varchar(100), 409 | cp_type varchar(100), 410 | primary key (cp_catalog_page_sk) 411 | ); 412 | 413 | create table inventory 414 | ( 415 | inv_date_sk integer not null, 416 | inv_item_sk integer not null, 417 | inv_warehouse_sk integer not null, 418 | inv_quantity_on_hand integer, 419 | primary key (inv_date_sk, inv_item_sk, inv_warehouse_sk) 420 | ); 421 | 
422 | create table catalog_returns 423 | ( 424 | cr_returned_date_sk integer, 425 | cr_returned_time_sk integer, 426 | cr_item_sk integer not null, 427 | cr_refunded_customer_sk integer, 428 | cr_refunded_cdemo_sk integer, 429 | cr_refunded_hdemo_sk integer, 430 | cr_refunded_addr_sk integer, 431 | cr_returning_customer_sk integer, 432 | cr_returning_cdemo_sk integer, 433 | cr_returning_hdemo_sk integer, 434 | cr_returning_addr_sk integer, 435 | cr_call_center_sk integer, 436 | cr_catalog_page_sk integer, 437 | cr_ship_mode_sk integer, 438 | cr_warehouse_sk integer, 439 | cr_reason_sk integer, 440 | cr_order_number integer not null, 441 | cr_return_quantity integer, 442 | cr_return_amount decimal(7, 2), 443 | cr_return_tax decimal(7, 2), 444 | cr_return_amt_inc_tax decimal(7, 2), 445 | cr_fee decimal(7, 2), 446 | cr_return_ship_cost decimal(7, 2), 447 | cr_refunded_cash decimal(7, 2), 448 | cr_reversed_charge decimal(7, 2), 449 | cr_store_credit decimal(7, 2), 450 | cr_net_loss decimal(7, 2), 451 | primary key (cr_item_sk, cr_order_number) 452 | ); 453 | 454 | create table web_returns 455 | ( 456 | wr_returned_date_sk integer, 457 | wr_returned_time_sk integer, 458 | wr_item_sk integer not null, 459 | wr_refunded_customer_sk integer, 460 | wr_refunded_cdemo_sk integer, 461 | wr_refunded_hdemo_sk integer, 462 | wr_refunded_addr_sk integer, 463 | wr_returning_customer_sk integer, 464 | wr_returning_cdemo_sk integer, 465 | wr_returning_hdemo_sk integer, 466 | wr_returning_addr_sk integer, 467 | wr_web_page_sk integer, 468 | wr_reason_sk integer, 469 | wr_order_number integer not null, 470 | wr_return_quantity integer, 471 | wr_return_amt decimal(7, 2), 472 | wr_return_tax decimal(7, 2), 473 | wr_return_amt_inc_tax decimal(7, 2), 474 | wr_fee decimal(7, 2), 475 | wr_return_ship_cost decimal(7, 2), 476 | wr_refunded_cash decimal(7, 2), 477 | wr_reversed_charge decimal(7, 2), 478 | wr_account_credit decimal(7, 2), 479 | wr_net_loss decimal(7, 2), 480 | primary key (wr_item_sk, wr_order_number) 481 | ); 482 | 483 | create table web_sales 484 | ( 485 | ws_sold_date_sk integer, 486 | ws_sold_time_sk integer, 487 | ws_ship_date_sk integer, 488 | ws_item_sk integer not null, 489 | ws_bill_customer_sk integer, 490 | ws_bill_cdemo_sk integer, 491 | ws_bill_hdemo_sk integer, 492 | ws_bill_addr_sk integer, 493 | ws_ship_customer_sk integer, 494 | ws_ship_cdemo_sk integer, 495 | ws_ship_hdemo_sk integer, 496 | ws_ship_addr_sk integer, 497 | ws_web_page_sk integer, 498 | ws_web_site_sk integer, 499 | ws_ship_mode_sk integer, 500 | ws_warehouse_sk integer, 501 | ws_promo_sk integer, 502 | ws_order_number integer not null, 503 | ws_quantity integer, 504 | ws_wholesale_cost decimal(7, 2), 505 | ws_list_price decimal(7, 2), 506 | ws_sales_price decimal(7, 2), 507 | ws_ext_discount_amt decimal(7, 2), 508 | ws_ext_sales_price decimal(7, 2), 509 | ws_ext_wholesale_cost decimal(7, 2), 510 | ws_ext_list_price decimal(7, 2), 511 | ws_ext_tax decimal(7, 2), 512 | ws_coupon_amt decimal(7, 2), 513 | ws_ext_ship_cost decimal(7, 2), 514 | ws_net_paid decimal(7, 2), 515 | ws_net_paid_inc_tax decimal(7, 2), 516 | ws_net_paid_inc_ship decimal(7, 2), 517 | ws_net_paid_inc_ship_tax decimal(7, 2), 518 | ws_net_profit decimal(7, 2), 519 | primary key (ws_item_sk, ws_order_number) 520 | ); 521 | 522 | create table catalog_sales 523 | ( 524 | cs_sold_date_sk integer, 525 | cs_sold_time_sk integer, 526 | cs_ship_date_sk integer, 527 | cs_bill_customer_sk integer, 528 | cs_bill_cdemo_sk integer, 529 | cs_bill_hdemo_sk integer, 530 
| cs_bill_addr_sk integer, 531 | cs_ship_customer_sk integer, 532 | cs_ship_cdemo_sk integer, 533 | cs_ship_hdemo_sk integer, 534 | cs_ship_addr_sk integer, 535 | cs_call_center_sk integer, 536 | cs_catalog_page_sk integer, 537 | cs_ship_mode_sk integer, 538 | cs_warehouse_sk integer, 539 | cs_item_sk integer not null, 540 | cs_promo_sk integer, 541 | cs_order_number integer not null, 542 | cs_quantity integer, 543 | cs_wholesale_cost decimal(7, 2), 544 | cs_list_price decimal(7, 2), 545 | cs_sales_price decimal(7, 2), 546 | cs_ext_discount_amt decimal(7, 2), 547 | cs_ext_sales_price decimal(7, 2), 548 | cs_ext_wholesale_cost decimal(7, 2), 549 | cs_ext_list_price decimal(7, 2), 550 | cs_ext_tax decimal(7, 2), 551 | cs_coupon_amt decimal(7, 2), 552 | cs_ext_ship_cost decimal(7, 2), 553 | cs_net_paid decimal(7, 2), 554 | cs_net_paid_inc_tax decimal(7, 2), 555 | cs_net_paid_inc_ship decimal(7, 2), 556 | cs_net_paid_inc_ship_tax decimal(7, 2), 557 | cs_net_profit decimal(7, 2), 558 | primary key (cs_item_sk, cs_order_number) 559 | ); 560 | 561 | create table store_sales 562 | ( 563 | ss_sold_date_sk integer, 564 | ss_sold_time_sk integer, 565 | ss_item_sk integer not null, 566 | ss_customer_sk integer, 567 | ss_cdemo_sk integer, 568 | ss_hdemo_sk integer, 569 | ss_addr_sk integer, 570 | ss_store_sk integer, 571 | ss_promo_sk integer, 572 | ss_ticket_number integer not null, 573 | ss_quantity integer, 574 | ss_wholesale_cost decimal(7, 2), 575 | ss_list_price decimal(7, 2), 576 | ss_sales_price decimal(7, 2), 577 | ss_ext_discount_amt decimal(7, 2), 578 | ss_ext_sales_price decimal(7, 2), 579 | ss_ext_wholesale_cost decimal(7, 2), 580 | ss_ext_list_price decimal(7, 2), 581 | ss_ext_tax decimal(7, 2), 582 | ss_coupon_amt decimal(7, 2), 583 | ss_net_paid decimal(7, 2), 584 | ss_net_paid_inc_tax decimal(7, 2), 585 | ss_net_profit decimal(7, 2), 586 | primary key (ss_item_sk, ss_ticket_number) 587 | ); 588 | -------------------------------------------------------------------------------- /tpcds_metrics_data.csv: -------------------------------------------------------------------------------- 1 | cpu_user,cpu_system,memory_used,memory_free,io_read,io_write,pg_session_count,pg_db_name,pg_xact_commit,pg_xact_rollback,pg_buffers_alloc,pg_buffers_backend,pg_disk_io_table_name,pg_heap_blks_read,pg_heap_blks_hit,pg_cache_hit_ratio,pg_long_running_queries,pg_ungranted_locks 2 | 57.5,5.0,802410496,143024128,103491,105635,19,,56933,101,126625929,35218333,customer_address,209940,1713641,0.44817456623270655232,0,0 3 | 20.5,1.1,819634176,205193216,109609,105900,17,,56933,101,127030138,35218334,customer_address,209940,1713641,0.44974472133678340334,0,0 4 | 20.6,1.9,827731968,278052864,121409,106167,17,,56933,101,127797520,35218334,customer_address,224778,1772652,0.44994078298656292157,0,0 5 | 32.86666666666667,2.6666666666666665,816592213.3333334,208756736.0,111503.0,105900.66666666667,17.666666666666668,,56933.0,101.0,127151195.66666667,35218333.666666664,customer_address,214886.0,1733311.3333333333,0.44928669018535095,0.0,0.0 6 | -------------------------------------------------------------------------------- /tpcds_metrics_data.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/binbjz/tpcds_pg/6a9c0ab3406e48470c9fe42305b9c30028068b3f/tpcds_metrics_data.png -------------------------------------------------------------------------------- /tpcds_metrics_subplotter.py: 
--------------------------------------------------------------------------------  1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | from typing import Optional 4 | from pathlib import Path 5 | from loguru import logger 6 | 7 | 8 | def plot_metrics_from_csv_chunked_continuous(csv_path: str, ncols: int, chunk_size: int = 5000, 9 | output_dir: Optional[Path] = None, 10 | output_image_name: Optional[str] = None): 11 | """ 12 | Read the data from the given CSV file path in chunks and plot each metric as a continuous chart. 13 | A global index offset keeps the x axis continuous across chunks. 14 | """ 15 | rows_read, total_rows = 0, sum(1 for _ in open(csv_path)) - 1 16 | axes = None 17 | for chunk in pd.read_csv(csv_path, chunksize=chunk_size): 18 | if rows_read + len(chunk) >= total_rows: 19 | last_row = chunk.iloc[-1] 20 | chunk = chunk.iloc[:-1] 21 | logger.info("Last row of data:\n{}".format(last_row)) 22 | 23 | numeric_cols_chunk = chunk.select_dtypes(include=["number"]).dropna(axis=1) 24 | if len(numeric_cols_chunk.columns) == 0: 25 | logger.info("No numeric columns available for plotting.") 26 | continue 27 | 28 | 29 | if rows_read == 0: 30 | num_columns = len(numeric_cols_chunk.columns) 31 | nrows = num_columns // ncols + (num_columns % ncols > 0) 32 | fig, axes = plt.subplots(nrows=nrows, ncols=ncols, figsize=(20, 6 * nrows)) 33 | 34 | for i, col in enumerate(numeric_cols_chunk.columns): 35 | ax = axes.flatten()[i] 36 | x_ticks = range(rows_read, rows_read + len(chunk)) 37 | ax.plot(x_ticks, numeric_cols_chunk[col], alpha=0.5) 38 | ax.set_title(col, fontsize=12) 39 | 40 | rows_read += len(chunk) 41 | 42 | plt.tight_layout() 43 | save_path = (output_dir if output_dir else Path(".")).joinpath( 44 | output_image_name if output_image_name else "metrics_data_continuous.png") 45 | plt.savefig(save_path, dpi=300) 46 | logger.info(f"Metrics plot saved to {save_path}") 47 | plt.show() 48 | 49 | 50 | if __name__ == "__main__": 51 | _csv_file = "tpcds_metrics_data.csv" 52 | plot_metrics_from_csv_chunked_continuous(_csv_file, ncols=2) 53 |
-------------------------------------------------------------------------------- /update_query_templates.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | directory="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/query_templates" 4 | 5 | # ========================================================== 6 | # Function: update_query_templates_add 7 | # Description: Append a given line of text to the end of each file 8 | # Note: the dialect .tpl files already include define _BEGIN and define _END, 9 | # so there is no longer any need to append define _END = "" to each query tpl file. 10 | # ========================================================== 11 | update_query_templates_add() { 12 | local text_to_add="define _END = \"\"" 13 | for i in $(seq 1 99); do 14 | file="${directory}/query${i}.tpl" 15 | if [ -f "$file" ]; then 16 | echo "$text_to_add" >>"$file" 17 | echo "Appended the text to the end of $file" 18 | else 19 | echo "$file does not exist" 20 | fi 21 | done 22 | } 23 | 24 | # ========================================================== 25 | # Function: update_query_templates_remove 26 | # Description: Remove one line (the previously appended text) from the end of each file 27 | # ========================================================== 28 | update_query_templates_remove() { 29 | for i in $(seq 1 99); do 30 | file="${directory}/query${i}.tpl" 31 | if [ -f "$file" ]; then 32 | sed -i '$ d' "$file" 33 | echo "Removed the text from the end of $file" 34 | else 35 | echo "$file does not exist" 36 | fi 37 | done 38 | } 39 | 40 | # Run the functions 41 | update_query_templates_add 42 | update_query_templates_remove 43 | --------------------------------------------------------------------------------
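Since update_query_templates.sh both appends and strips the trailing `define _END = ""` line, it can help to check what the templates currently end with before running either function. A small sketch of such a check, assuming the same query_templates directory used by the script:

```sh
directory="/home/parallels/prac_bin/TPC-DS-Tool_v3.2.0/DSGen-software-code-3.2.0rc1/query_templates"
# Report templates whose last line is not the _END define
for f in "${directory}"/query{1..99}.tpl; do
  [ -f "$f" ] || continue
  tail -n 1 "$f" | grep -q '^define _END' || echo "$f: no trailing _END define"
done
```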