├── .github └── workflows │ ├── ci.yml │ └── docker-release.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── bin └── syncany-sql ├── config.yaml.example ├── docs ├── README.md ├── configure.md ├── driver-dependency.md ├── feature-restrictions.md ├── functions.md └── 使用教程 │ ├── 1、使用介绍.md │ ├── 2、安装和配置.md │ ├── 3、类型注解和类型转换.md │ └── 4、添加使用自定义函数.md ├── examples ├── README.md ├── aggregate │ ├── README.md │ ├── aggregate.sql │ ├── aggregate_batch.sql │ ├── aggregate_customize.py │ ├── data │ │ ├── goodses.json │ │ ├── order_historys.json │ │ ├── orders.json │ │ └── users.json │ └── window_aggregate.sql ├── datetime │ ├── README.md │ └── datetime.sql ├── demo │ ├── README.md │ ├── data │ │ ├── demo.json │ │ ├── orders.json │ │ └── sites.json │ ├── demo.sql │ ├── demo2.sql │ ├── execute.sql │ └── json │ │ ├── database.json │ │ ├── demo.json │ │ └── log.json ├── functions │ └── generate.sql ├── get_value │ ├── README.md │ ├── data │ │ └── data.json │ └── get_value.sql ├── import_python │ ├── README.md │ ├── config.yaml │ ├── import_python.sql │ ├── import_python2.sql │ ├── init.sql │ ├── syncany_ext.py │ ├── util_helpers.py │ └── utils.py ├── insert_types │ ├── README.md │ ├── delete_insert.sql │ ├── insert.sql │ ├── update.sql │ ├── update_delete_insert.sql │ └── update_insert.sql ├── joins │ ├── data │ │ ├── devices.json │ │ ├── goodses.json │ │ ├── order_historys.json │ │ ├── orders.json │ │ ├── services.json │ │ └── users.json │ ├── inner_join.sql │ ├── left_join.sql │ └── right_join.sql ├── json │ ├── README.md │ └── json.sql ├── logic_operation │ ├── data │ │ └── orders.json │ └── logic_operation.sql ├── loop │ └── loop.sql ├── mathematical │ ├── README.md │ └── mathematical.sql ├── memory_temporary_storage │ ├── README.md │ ├── data │ │ └── user.json │ └── memory_temporary_storage.sql ├── nginx-log │ ├── README.md │ ├── data │ │ └── access.log │ └── ip-top-3.sql ├── parameter_variable │ ├── README.md │ ├── data │ │ └── orders.json │ ├── parameter_assign.sql │ └── parameter_variable.sql ├── pyeval │ ├── README.md │ └── pyeval.sql ├── raw_query │ ├── README.md │ └── raw_query.sql ├── strings │ ├── README.md │ └── strings.sql ├── subquery │ ├── data │ │ ├── goodses.json │ │ ├── order_historys.json │ │ ├── orders.json │ │ └── users.json │ └── subquery.sql ├── time_window │ ├── README.md │ ├── data │ │ └── order.csv │ └── time_window.sql ├── transform │ ├── README.md │ ├── data │ │ ├── data.json │ │ └── sites.json │ ├── transform_customize.py │ ├── transform_customize.sql │ ├── transform_h2v.sql │ ├── transform_h4v.sql │ ├── transform_uniqkv.sql │ ├── transform_v2h.sql │ └── transform_v4h.sql ├── type_annotation │ ├── README.md │ ├── data │ │ └── data.json │ ├── type_annotation.sql │ └── type_declaration_cast.sql ├── window_aggregate │ ├── data │ │ ├── goodses.json │ │ ├── orders.json │ │ └── users.json │ ├── window.sql │ └── window_customize.py └── yield_data │ ├── README.md │ ├── generate_customize.py │ └── yield_data.sql ├── requirements.txt ├── setup.cfg ├── setup.py ├── syncanysql ├── __init__.py ├── calculaters │ ├── __init__.py │ ├── aggregate_calculater.py │ ├── env_variable_calculater.py │ ├── generate_calculater.py │ ├── mysql_calculater.py │ ├── mysql_funcs │ │ ├── __init__.py │ │ ├── datetime_funcs.py │ │ ├── json_funcs.py │ │ ├── logical_funcs.py │ │ ├── number_funcs.py │ │ ├── regexp_funcs.py │ │ └── string_funcs.py │ ├── pyeval_calculater.py │ ├── query_tasker_calculater.py │ ├── row_calculater.py │ └── window_calculater.py ├── compiler.py ├── config.py ├── 
errors.py ├── executor.py ├── main.py ├── parser.py ├── prompt.py ├── taskers │ ├── __init__.py │ ├── delete.py │ ├── execute.py │ ├── explain.py │ ├── into.py │ ├── query.py │ ├── set.py │ ├── show.py │ └── use.py ├── utils.py └── version.py └── tests ├── __init__.py ├── example.py ├── test_example_aggregate.py ├── test_example_datetime.py ├── test_example_demo.py ├── test_example_functions.py ├── test_example_get_value.py ├── test_example_import_python.py ├── test_example_insert_types.py ├── test_example_joins.py ├── test_example_json.py ├── test_example_logic_operation.py ├── test_example_loop.py ├── test_example_mathematical.py ├── test_example_memory_temporary_storage.py ├── test_example_nginx_log.py ├── test_example_parameter_variable.py ├── test_example_pyeval.py ├── test_example_strings.py ├── test_example_subquery.py ├── test_example_time_window.py ├── test_example_transform.py ├── test_example_type_annotation.py ├── test_example_window_aggregate.py ├── test_example_yield_data.py └── test_script_engine.py
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | # This workflow installs Python dependencies and runs the test suite against a matrix of Python versions
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
3 | 
4 | name: CI
5 | 
6 | on:
7 |   push:
8 |     branches: [ "main" ]
9 |   pull_request:
10 |     branches: [ "main" ]
11 | 
12 | jobs:
13 |   build:
14 | 
15 |     runs-on: ubuntu-latest
16 |     strategy:
17 |       fail-fast: false
18 |       matrix:
19 |         python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
20 | 
21 |     steps:
22 |     - uses: actions/checkout@v3
23 |     - name: Set up Python ${{ matrix.python-version }}
24 |       uses: actions/setup-python@v3
25 |       with:
26 |         python-version: ${{ matrix.python-version }}
27 |     - name: Install dependencies
28 |       run: |
29 |         python -m pip install --upgrade pip
30 |         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
31 |         pip install git+https://github.com/snower/syncany.git#egg=syncany
32 |     - name: Test
33 |       run: |
34 |         pytest --cov=syncany --cov=syncanysql --without-integration --without-slow-integration
35 | 
--------------------------------------------------------------------------------
/.github/workflows/docker-release.yml:
--------------------------------------------------------------------------------
1 | name: Docker Release
2 | 
3 | on:
4 |   release:
5 |     types: [ published, edited ]
6 | 
7 |   workflow_dispatch:
8 |     inputs:
9 |       no_cache:
10 |         type: boolean
11 |         description: 'Build from scratch, without using cached layers'
12 | 
13 | env:
14 |   IMAGE_NAME: syncany-sql
15 |   DEPLOY_IMAGE_NAME: ${{ secrets.DOCKER_USERNAME }}/syncany-sql
16 | 
17 | jobs:
18 |   build:
19 |     runs-on: ubuntu-latest
20 |     steps:
21 |       - name: Checkout repository
22 |         uses: actions/checkout@v3
23 | 
24 |       - name: Log in to Docker Hub
25 |         uses: docker/login-action@v2
26 |         with:
27 |           username: ${{ secrets.DOCKER_USERNAME }}
28 |           password: ${{ secrets.DOCKER_PASSWORD }}
29 | 
30 |       - name: Set up Docker Buildx
31 |         uses: docker/setup-buildx-action@v2
32 | 
33 |       # slashes are not allowed in image tags, but can appear in git branch or tag names
34 |       - id: sanitize_tag
35 |         name: Sanitize image tag
36 |         run: echo tag=${raw_tag//\//-} >> $GITHUB_OUTPUT
37 |         env:
38 |           raw_tag: ${{ github.ref_name }}
39 | 
40 |       - id: build
41 |         name: Build image
42 |         uses: docker/build-push-action@v3
43 |         with:
44 |           build-args: BUILD_TYPE=release
45 |           load: true # save to docker images
46
| # push: true # TODO: uncomment when this issue is fixed: https://github.com/moby/buildkit/issues/1555 47 | tags: > 48 | ${{ env.IMAGE_NAME }}, 49 | ${{ env.DEPLOY_IMAGE_NAME }}:latest, 50 | ${{ env.DEPLOY_IMAGE_NAME }}:${{ steps.sanitize_tag.outputs.tag }} 51 | 52 | # cache layers in GitHub Actions cache to speed up builds 53 | cache-from: ${{ !inputs.no_cache && 'type=gha' || '' }},scope=docker-release 54 | cache-to: type=gha,scope=docker-release,mode=max 55 | 56 | - name: Push image to Docker Hub 57 | run: docker push --all-tags ${{ env.DEPLOY_IMAGE_NAME }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | .idea -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim 2 | 3 | WORKDIR /data 4 | 5 | RUN apt-get update && apt-get install -y ca-certificates git 6 | 7 | COPY requirements.txt /root 8 | 9 | RUN cd /root && \ 10 | sed -i '/Items below this point will not be included in the Docker Image/,$d' requirements.txt && \ 11 | python -m pip install --upgrade pip && \ 12 | pip install --no-cache-dir -r requirements.txt && \ 13 | pip install git+https://github.com/snower/syncany.git#egg=syncany && \ 14 | pip install git+https://github.com/snower/syncany-sql.git#egg=syncanysql 15 | 16 | CMD syncany-sql 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 snower 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Syncany-SQL
2 | [![Tests](https://img.shields.io/github/actions/workflow/status/snower/syncany-sql/ci.yml?label=tests)](https://github.com/snower/syncany-sql/actions/workflows/ci.yml)
3 | [![GitHub Repo stars](https://img.shields.io/github/stars/snower/syncany-sql?style=social)](https://github.com/snower/syncany-sql/stargazers)
4 | 
5 | An easy-to-use SQL execution engine.
6 | 
7 | - Runs SQL written in MySQL syntax locally
8 | - Queries the common MySQL, MongoDB, PostgreSQL, SQL Server, Elasticsearch, InfluxDB, ClickHouse and SQLite databases, as well as Excel, CSV, JSON and plain text files
9 | - Supports local temporary tables for storing intermediate results
10 | - Loads database data with simple condition filters and IN queries
11 | - Join matching is completed locally, so joins can span different tables and databases, different hosts and even different database types
12 | - Group By aggregation and Order By sorting are also executed locally, which keeps the databases safe
13 | - Insert Into supports four write modes: "insert only (I)", "update, otherwise insert (UI)", "update, otherwise insert, delete the rest (UDI)" and "delete, then insert (DI)"
14 | - Large data sets can be executed in batches; with Group By or Having filters a Reduce step automatically merges the batch results
15 | - Supports streaming execution
16 | 
17 | -----
18 | 
19 | - [Installation](#installation)
20 | - [Features and Restrictions](docs/feature-restrictions.md)
21 | - [Configuration](docs/configure.md)
22 | - [Driver Dependencies](docs/driver-dependency.md)
23 | - [Examples](examples)
24 | - [Built-in Functions](docs/functions.md)
25 | - [Tutorials](docs/使用教程/)
26 | 
27 | ## Installation
28 | 
29 | ```bash
30 | pip3 install syncanysql
31 | ```
32 | 
33 | #### Docker
34 | 
35 | ```bash
36 | docker pull sujin190/syncany-sql
37 | ```
38 | 
39 | ## Querying an Nginx log
40 | 
41 | ```sql
42 | -- Top 3 IPs by number of requests
43 | SELECT seg0 AS ip, COUNT(*) AS cnt FROM `file://data/access.log?sep= ` GROUP BY seg0 ORDER BY cnt DESC LIMIT 3;
44 | ```
45 | 
46 | ## Querying JSON files
47 | 
48 | ```sql
49 | SELECT
50 |     a.site_id,
51 |     b.name AS site_name,
52 |     IF(c.site_amount > 0, c.site_amount, 0) AS site_amount,
53 |     MAX(a.timeout_at) AS timeout_at,
54 |     MAX(a.vip_timeout_at) AS vip_timeout_at,
55 |     now() as `created_at?`
56 | FROM
57 |     (SELECT
58 |         YIELD_ARRAY(sites) AS site_id,
59 |         IF(vip_type = '2', GET_VALUE(rules, 0, 'timeout_time'), '') AS timeout_at,
60 |         IF(vip_type = '1', GET_VALUE(rules, 0, 'timeout_time'), '') AS vip_timeout_at
61 |     FROM
62 |         `data/demo.json`
63 |     WHERE
64 |         start_date >= '2021-01-01') a
65 |         JOIN
66 |     `data/sites.json` b ON a.site_id = b.site_id
67 |         JOIN
68 |     (SELECT
69 |         site_id, SUM(amount) AS site_amount
70 |     FROM
71 |         `data/orders.json`
72 |     WHERE
73 |         status <= 0
74 |     GROUP BY site_id) c ON a.site_id = c.site_id
75 | GROUP BY a.site_id;
76 | ```
77 | 
78 | # Python API
79 | 
80 | ```python
81 | from syncanysql import ScriptEngine
82 | 
83 | with ScriptEngine() as engine:
84 |     engine.execute('''
85 |         INSERT INTO `top_ips` SELECT
86 |             ip, cnt
87 |         FROM
88 |             (SELECT
89 |                 seg0 AS ip, COUNT(*) AS cnt
90 |             FROM
91 |                 `file:///var/log/nginx/access.log?sep= `
92 |             GROUP BY seg0) a
93 |         ORDER BY cnt DESC
94 |         LIMIT 3;
95 |     ''')
96 |     print(engine.pop_memory_datas("top_ips"))
97 | ```
98 | 
99 | # License
100 | 
101 | Syncany-SQL uses the MIT license, see LICENSE file for the details.
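102 | 
103 | # Appendix: a combined example
104 | 
105 | A minimal sketch that chains several of the features above: reading a JSON file, aggregating locally, and staging the result in an in-memory temporary table. It assumes it is run from the repository root so that the shipped example file `examples/demo/data/orders.json` (with `site_id`, `amount` and `status` fields) is available; the temporary table name `site_totals` is only an illustrative choice:
106 | 
107 | ```sql
108 | -- Stage per-site totals in an in-memory temporary table, then query it again.
109 | INSERT INTO `site_totals` SELECT
110 |     site_id, SUM(amount) AS total_amount, COUNT(*) AS order_cnt
111 | FROM
112 |     `examples/demo/data/orders.json`
113 | WHERE
114 |     status <= 0
115 | GROUP BY site_id;
116 | 
117 | SELECT * FROM `site_totals` ORDER BY total_amount DESC LIMIT 3;
118 | ```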
--------------------------------------------------------------------------------
/bin/syncany-sql:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | # -*- coding: utf-8 -*-
4 | # 2023/2/7
5 | # create by: snower
6 | 
7 | import os
8 | import sys
9 | 
10 | sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
11 | 
12 | from syncanysql.main import main
13 | 
14 | if __name__ == "__main__":
15 |     main()
--------------------------------------------------------------------------------
/config.yaml.example:
--------------------------------------------------------------------------------
1 | # The syncany-sql configuration file defines global logging, timezone and encoding settings, database connection parameters and globally imported packages
2 | # Both JSON and YAML configuration files are supported
3 | # By default "./config.[json|yaml]" in the current directory and "~/.syncany/config.[json|yaml]" in the user's home directory are loaded
4 | # Options from the current directory take precedence over the home directory; the merged options form the final configuration
5 | 
6 | # Log file path; leaving it unset, an empty string or '-' means standard output
7 | logfile: '-'
8 | # Log format, see the Python standard library logging configuration
9 | logformat: ''
10 | # Log level: CRITICAL ERROR WARNING INFO DEBUG, default INFO
11 | loglevel: 'INFO'
12 | 
13 | # File encoding, default utf-8
14 | encoding: 'utf-8'
15 | # Default datetime format, see the Python standard library datetime for output format codes
16 | datetime_format: '%Y-%m-%d %H:%M:%S'
17 | # Default date format, see the Python standard library datetime for output format codes
18 | date_format: '%Y-%m-%d'
19 | # Default time format, see the Python standard library datetime for output format codes
20 | time_format: '%H:%M:%S'
21 | 
22 | # Database configurations
23 | databases:
24 |   - name: mysql_example  # MySQL example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/PyMySQL/PyMySQL
25 |     driver: mysql
26 |     host: '127.0.0.1'
27 |     port: 3306
28 |     user: 'root'
29 |     passwd: '123456'
30 |     db: 'example'
31 |     charset: 'utf8mb4'
32 | 
33 |   - name: mongo_example  # MongoDB example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/mongodb/mongo-python-driver
34 |     driver: mongo
35 |     host: "127.0.0.1"
36 |     port: 27017
37 |     username: 'admin'
38 |     password: '123456'
39 |     authSource: 'admin'
40 |     db: 'example'
41 | 
42 |   - name: postgresql_example  # PostgreSQL example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/psycopg/psycopg2
43 |     driver: postgresql
44 |     host: "127.0.0.1"
45 |     port: 5432
46 |     username: 'user'
47 |     password: '123456'
48 |     dbname: 'example'
49 | 
50 |   - name: sqlserver_example  # Microsoft SQL Server example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/pymssql/pymssql
51 |     driver: sqlserver
52 |     host: '127.0.0.1'
53 |     port: 1433
54 |     user: 'sa'
55 |     password: '123456'
56 |     database: 'example'
57 |     charset: 'utf8'
58 | 
59 |   - name: clickhouse_example  # ClickHouse example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/mymarilyn/clickhouse-driver
60 |     driver: clickhouse
61 |     host: "127.0.0.1"
62 |     port: 9000
63 |     username: 'default'
64 |     password: '123456'
65 |     database: 'example'
66 | 
67 |   - name: influxdb_example  # InfluxDB example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/influxdata/influxdb-python
68 |     driver: influxdb
69 |     host: "127.0.0.1"
70 |     port: 8086
71 |     username: 'root'
72 |     password: '123456'
73 |     database: 'example'
74 | 
75 |   - name: elasticsearch_example  # Elasticsearch example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/elastic/elasticsearch-py
76 |     driver: elasticsearch
77 |     hosts: "http://localhost:9200"
78 | 
79 |   - name: sqlite_example  # SQLite example; name and driver are syncany-specific, the remaining connection parameters follow https://docs.python.org/3/library/sqlite3.html
80 |     driver: sqlite
81 |     database: ':memory:'
82 | 
83 | # Import extensions, which can register custom database drivers, custom scalar or aggregate functions and other extension features
84 | # extensions:
85 | #   - myext
86 | 
87 | # Globally imported packages; packages can also be imported with the "use" statement in SQL
88 | #imports:
89 | #  np: numpy  # import numpy under the alias np, then call it in SQL as "np$array()"
90 | 
91 | #
Run initialization SQL scripts
92 | #executes:
93 | #  - init.sql  # Runs init.sql from the current directory; if the script is in the home directory use "${HOME}/init.sql", if it is in the syncany configuration directory use "${SYNCANY_HOME}/init.sql"
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Syncany-SQL
2 | 
3 | An easy-to-use SQL execution engine.
4 | 
5 | - Runs SQL written in MySQL syntax locally
6 | - Queries the common MySQL, MongoDB, PostgreSQL, SQL Server, Elasticsearch, InfluxDB, ClickHouse and SQLite databases, as well as Excel, CSV, JSON and plain text files
7 | - Supports local temporary tables for storing intermediate results
8 | - Loads database data with simple condition filters and IN queries
9 | - Join matching is completed locally, so joins can span different tables and databases, different hosts and even different database types
10 | - Group By aggregation and Order By sorting are also executed locally, which keeps the databases safe
11 | - Insert Into supports four write modes: "insert only (I)", "update, otherwise insert (UI)", "update, otherwise insert, delete the rest (UDI)" and "delete, then insert (DI)"
12 | - Large data sets can be executed in batches; with Group By or Having filters a Reduce step automatically merges the batch results
13 | - Supports streaming execution
14 | 
15 | -----
16 | 
17 | - [Installation](#installation)
18 | - [Features and Restrictions](feature-restrictions.md)
19 | - [Configuration](configure.md)
20 | - [Driver Dependencies](driver-dependency.md)
21 | - [Examples](../examples)
22 | - [Built-in Functions](functions.md)
23 | 
24 | ## Installation
25 | 
26 | ```
27 | pip3 install syncanysql
28 | ```
29 | 
30 | ## Querying an Nginx log
31 | 
32 | ```sql
33 | -- Top 3 IPs by number of requests
34 | SELECT seg0 AS ip, COUNT(*) AS cnt FROM `file://data/access.log?sep= ` GROUP BY seg0 ORDER BY cnt DESC LIMIT 3;
35 | ```
36 | 
37 | ## Querying JSON files
38 | 
39 | ```sql
40 | SELECT
41 |     a.site_id,
42 |     b.name AS site_name,
43 |     IF(c.site_amount > 0, c.site_amount, 0) AS site_amount,
44 |     MAX(a.timeout_at) AS timeout_at,
45 |     MAX(a.vip_timeout_at) AS vip_timeout_at,
46 |     now() as `created_at?`
47 | FROM
48 |     (SELECT
49 |         YIELD_ARRAY(sites) AS site_id,
50 |         IF(vip_type = '2', GET_VALUE(rules, 0, 'timeout_time'), '') AS timeout_at,
51 |         IF(vip_type = '1', GET_VALUE(rules, 0, 'timeout_time'), '') AS vip_timeout_at
52 |     FROM
53 |         `data/demo.json`
54 |     WHERE
55 |         start_date >= '2021-01-01') a
56 |         JOIN
57 |     `data/sites.json` b ON a.site_id = b.site_id
58 |         JOIN
59 |     (SELECT
60 |         site_id, SUM(amount) AS site_amount
61 |     FROM
62 |         `data/orders.json`
63 |     WHERE
64 |         status <= 0
65 |     GROUP BY site_id) c ON a.site_id = c.site_id
66 | GROUP BY a.site_id;
67 | ```
68 | 
69 | # License
70 | 
71 | Syncany-SQL uses the MIT license, see LICENSE file for the details.
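72 | 
73 | # Appendix: writing results to a file
74 | 
75 | As noted above, Insert Into can write results directly to Excel, JSON or CSV files as well as to database tables. A minimal sketch, assuming the demo file `examples/demo/data/orders.json` shipped with this repository; the output path `output/site_amounts.csv` is only an illustrative name:
76 | 
77 | ```sql
78 | -- Aggregate order amounts per site and export the result as a CSV file.
79 | INSERT INTO `output/site_amounts.csv` SELECT
80 |     site_id, SUM(amount) AS site_amount
81 | FROM
82 |     `examples/demo/data/orders.json`
83 | WHERE
84 |     status <= 0
85 | GROUP BY site_id;
86 | ```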
--------------------------------------------------------------------------------
/docs/configure.md:
--------------------------------------------------------------------------------
1 | # Configuration Example
2 | 
3 | ```yaml
4 | # The syncany-sql configuration file defines global logging, timezone and encoding settings, database connection parameters and globally imported packages
5 | # Both JSON and YAML configuration files are supported
6 | # By default "./config.[json|yaml]" in the current directory and "~/.syncany/config.[json|yaml]" in the user's home directory are loaded
7 | # Options from the current directory take precedence over the home directory; the merged options form the final configuration
8 | 
9 | # Log file path; leaving it unset, an empty string or '-' means standard output
10 | logfile: '-'
11 | # Log format, see the Python standard library logging configuration
12 | logformat: ''
13 | # Log level: CRITICAL ERROR WARNING INFO DEBUG, default INFO
14 | loglevel: 'INFO'
15 | 
16 | # File encoding, default utf-8
17 | encoding: 'utf-8'
18 | # Default datetime format, see the Python standard library datetime for output format codes
19 | datetime_format: '%Y-%m-%d %H:%M:%S'
20 | # Default date format, see the Python standard library datetime for output format codes
21 | date_format: '%Y-%m-%d'
22 | # Default time format, see the Python standard library datetime for output format codes
23 | time_format: '%H:%M:%S'
24 | 
25 | # Database configurations
26 | databases:
27 |   - name: mysql_example  # MySQL example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/PyMySQL/PyMySQL
28 |     driver: mysql
29 |     host: '127.0.0.1'
30 |     port: 3306
31 |     user: 'root'
32 |     passwd: '123456'
33 |     db: 'example'
34 |     charset: 'utf8mb4'
35 | 
36 |   - name: mongo_example  # MongoDB example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/mongodb/mongo-python-driver
37 |     driver: mongo
38 |     host: "127.0.0.1"
39 |     port: 27017
40 |     username: 'admin'
41 |     password: '123456'
42 |     authSource: 'admin'
43 |     db: 'example'
44 | 
45 |   - name: postgresql_example  # PostgreSQL example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/psycopg/psycopg2
46 |     driver: postgresql
47 |     host: "127.0.0.1"
48 |     port: 5432
49 |     username: 'user'
50 |     password: '123456'
51 |     dbname: 'example'
52 | 
53 |   - name: sqlserver_example  # Microsoft SQL Server example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/pymssql/pymssql
54 |     driver: sqlserver
55 |     host: '127.0.0.1'
56 |     port: 1433
57 |     user: 'sa'
58 |     password: '123456'
59 |     database: 'example'
60 |     charset: 'utf8'
61 | 
62 |   - name: clickhouse_example  # ClickHouse example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/mymarilyn/clickhouse-driver
63 |     driver: clickhouse
64 |     host: "127.0.0.1"
65 |     port: 9000
66 |     username: 'default'
67 |     password: '123456'
68 |     database: 'example'
69 | 
70 |   - name: influxdb_example  # InfluxDB example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/influxdata/influxdb-python
71 |     driver: influxdb
72 |     host: "127.0.0.1"
73 |     port: 8086
74 |     username: 'root'
75 |     password: '123456'
76 |     database: 'example'
77 | 
78 |   - name: elasticsearch_example  # Elasticsearch example; name and driver are syncany-specific, the remaining connection parameters follow https://github.com/elastic/elasticsearch-py
79 |     driver: elasticsearch
80 |     hosts: "http://localhost:9200"
81 | 
82 |   - name: sqlite_example  # SQLite example; name and driver are syncany-specific, the remaining connection parameters follow https://docs.python.org/3/library/sqlite3.html
83 |     driver: sqlite
84 |     database: ':memory:'
85 | 
86 | # Import extensions, which can register custom database drivers, custom scalar or aggregate functions and other extension features
87 | # extensions:
88 | #   - myext
89 | 
90 | # Globally imported packages; packages can also be imported with the "use" statement in SQL
91 | #imports:
92 | #  np: numpy  # import numpy under the alias np, then call it in SQL as "np$array()"
93 | 
94 | # Run initialization SQL scripts
95 | #executes:
96 | #  - init.sql  # Runs init.sql from the current directory; if the script is in the home directory use "${HOME}/init.sql", if it is in the syncany configuration directory use "${SYNCANY_HOME}/init.sql"
97 | ```
--------------------------------------------------------------------------------
/docs/driver-dependency.md:
--------------------------------------------------------------------------------
1 | # Driver Dependencies
2 | 
3 | Database drivers are not installed by default; install the corresponding driver yourself when you need to query, read or write that type of database or file.
4 | 
5 | -
[pymongo>=3.6.1](https://github.com/mongodb/mongo-python-driver)
6 | - [PyMySQL>=0.8.1](https://github.com/PyMySQL/PyMySQL)
7 | - [openpyxl>=2.5.0](https://github.com/theorchard/openpyxl)
8 | - [psycopg2>=2.8.6](https://github.com/psycopg/psycopg2)
9 | - [elasticsearch>=6.3.1](https://github.com/elastic/elasticsearch-py)
10 | - [influxdb>=5.3.1](https://github.com/influxdata/influxdb-python)
11 | - [clickhouse_driver>=0.1.5](https://github.com/mymarilyn/clickhouse-driver)
12 | - [redis>=3.5.3](https://github.com/redis/redis-py)
13 | - [pymssql>=2.2.7](https://github.com/pymssql/pymssql)
--------------------------------------------------------------------------------
/docs/feature-restrictions.md:
--------------------------------------------------------------------------------
1 | # Supported Features
2 | 
3 | - Join queries, including joins across different tables and databases, different machines and different database types
4 | - Having runs in memory and supports complex filter conditions; a Having filter that references only non-aggregated fields is applied before the aggregation, otherwise it is applied after the aggregation
5 | - Group By aggregation and distinct deduplication are computed entirely in memory
6 | - Order By sorting is done by the database when it only involves main-table fields, otherwise it is done in memory
7 | - Subqueries are supported
8 | - Where or join on conditions support simple subqueries (only and is supported, and only the comparison operators =, !=, >, >=, <, <= and in); such conditions are pushed down to the database
9 | - Insert Into can write directly to Excel, JSON and CSV files in addition to ordinary database tables
10 | - Insert Into supports an explicit merge type, with four merge algorithms (insert I; update otherwise insert UI; update otherwise insert, delete the rest UDI; delete then insert DI)
11 | - Batch execution is supported once a batch size is set
12 | - Where, Join on and Having condition values can all be supplied by subquery results
13 | - set assigns variable values, select into assigns query results to variables, and variables can be accessed during execution
14 | - Common MySQL functions are supported
15 | 
16 | 
17 | ```
18 | Note!!! Some SQL is unsupported or will not execute correctly because of how data is loaded; write SQL that follows the basic data loading logic:
19 | 1. To keep the database access interface simple, each database implementation only supports simple query conditions and sorting when loading data
20 | 2. JOIN queries load data from the database with IN queries and then complete the matching in memory
21 | ```
--------------------------------------------------------------------------------
/docs/functions.md:
--------------------------------------------------------------------------------
1 | # Supported Functions
2 | 
3 | - [Built-in functions](#built-in-functions)
4 | - [Common MySQL functions](#common-mysql-functions)
5 | - [Aggregate functions](#aggregate-functions)
6 | - [Window functions](#window-functions)
7 | - [YIELD functions](#yield-functions)
8 | - [Transform functions](#transform-functions)
9 | - [Time window functions](#time-window-functions)
10 | 
11 | ## Built-in functions
12 | 
13 | - type(expr)
14 | - is_null(expr)
15 | - is_int(expr)
16 | - is_float(expr)
17 | - is_decimal(expr)
18 | - is_number(expr)
19 | - is_string(expr)
20 | - is_bytes(expr)
21 | - is_bool(expr)
22 | - is_array(expr)
23 | - is_set(expr)
24 | - is_map(expr)
25 | - is_objectid(expr)
26 | - is_uuid(expr)
27 | - is_datetime(expr)
28 | - is_date(expr)
29 | - is_time(expr)
30 | - convert_int(expr)
31 | - convert_float(expr)
32 | - convert_decimal(expr)
33 | - convert_string(expr)
34 | - convert_bytes(expr)
35 | - convert_bool(expr)
36 | - convert_array(expr)
37 | - convert_set(expr)
38 | - convert_map(expr)
39 | - convert_objectid(expr)
40 | - convert_uuid(expr)
41 | - convert_datetime(expr)
42 | - convert_date(expr)
43 | - convert_time(expr)
44 | - range()
45 | - substring()
46 | - split()
47 | - join()
48 | - now()
49 | - current_env_variable()
50 | - objectid
51 | - uuid
52 | - snowflakeid
53 | 
54 | ## Common MySQL functions
55 | 
56 | - bitwiseand(x, y)
57 | - bitwiseor(x, y)
58 | - bitwisenot(x)
59 | - bitwisexor(x, y)
60 | - bitwiserightshift(x, y)
61 | - bitwiseleftshift(x, y)
62 | - abs(x)
63 | - sqrt(x)
64 | - exp(x)
65 | - pi()
66 | - ln(x)
67 | - log(x, base)
68 | - ceil(x)
69 | - ceiling(x)
70 | - floor(x)
71 | - rand()
72 | - round(x, y)
73 | - sign(x)
74 | - pow(x, y)
75 | - power(x, y)
76 | - sin(x)
77 | - asin(x)
78 | - cos(x)
79 | - acos(x)
80 | - tan(x)
81 | - atan(x)
82 | - greatest(expr, [expr, expr, ...])
83 | - least(expr, [expr, expr, ...])
84 | - bin(x)
85 | - hex(x)
86 | - unhex(x)
87 | - oct(x)
88 | - ord(x)
89 | - ascii(s)
90 | - char(expr, [expr, expr, ...])
91 | - bit_length(s)
92 | - length(s)
93 | - char_length(s)
94 | - character_length(s)
95 | - concat(expr, [expr, expr, ...])
96 | - concat_ws(sep)
97 | - insert(s1, x, l, s2)
98 | - lower(s)
99 | - upper(s)
100 | - ucase(s)
101 | - left(s, x)
102 | - right(s, x)
103 | - trim(s)
104 | - elt(n)
105 | - field(s)
106 | - find_in_set(s, ss)
107 | - replace(s, s1, s2)
108 | - substring(s, n, l)
109 | - substr(s, n, l)
110 | - substring_index(s, d, c)
111 | - repeat(s, c)
112 | - reverse(s)
113 | - strcmp(s1, s2)
114 | - startswith(s1, s2)
115 | - endswith(s1, s2)
116 | - contains(s1, s2)
117 | - crc32(s)
118 | - from_base64(s)
119 | - to_base64(s)
120 | - inet4_aton(s)
121 | - inet4_ntoa(b)
122 | - is_ipv4(s)
123 | - inet6_aton(s)
124 | - inet6_ntoa(b)
125 | - is_ipv6(s)
126 | - currenttimestamp()
127 | - curdate()
128 | - currentdate()
129 | - curtime()
130 | - currenttime()
131 | - sysdate()
132 | - date(dt)
133 | - datetime(dt)
134 | - time(dt)
135 | - unix_timestamp(dt)
136 | - from_unixtime(t)
137 | - month(dt)
138 | - monthname(dt)
139 | - dayname(dt)
140 | - dayofweek(dt)
141 | - week(dt, mod)
142 | - yearweek(dt, mod)
143 | - dayofyear(dt)
144 | - dayofmonth(dt)
145 | - year(dt)
146 | - time_to_sec(dt)
147 | - sec_to_time(t)
148 | - dateadd(dt, i)
149 | - adddate(dt, i)
150 | - datesub(dt, i)
151 | - subdate(dt, i)
152 | - addtime(dt, i)
153 | - subtime(dt, i)
154 | - datediff(dt1, dt2)
155 | - date_format(dt, f)
156 | - time_format(dt, f)
157 | - weekday(dt)
158 | - utc_date()
159 | - utc_time()
160 | - utc_timestamp()
161 | - json_contains(target, candidate, path)
162 | - json_contains_path(json_doc, one_or_all)
163 | - json_extract(json_doc)
164 | - json_depth(json_doc)
165 | - json_keys(json_doc, path)
166 | - json_length(json_doc, path)
167 | - json_valid(val)
168 | 
169 | ## Aggregate functions
170 | 
171 | - count([distinct] expr)
172 | - sum(expr)
173 | - max(expr)
174 | - min(expr)
175 | - avg(expr)
176 | - group_concat(expr)
177 | - group_array(expr)
178 | - group_uniq_array(expr)
179 | - group_bit_and(expr)
180 | - group_bit_or(expr)
181 | - group_bit_xor(expr)
182 | 
183 | ## Window functions
184 | 
185 | - row_number(expr)
186 | - rank(expr)
187 | - dense_rank(expr)
188 | - percent_rank(expr)
189 | - cume_dist(expr)
190 | 
191 | ## YIELD functions
192 | 
193 | - yield_array(values)
194 | 
195 | ## Transform functions
196 | 
197 | - transform$v4h(key, vkey)
198 | - transform$h4v(key, vkey)
199 | - transform$v2h(key, vkey, [value])
200 | - transform$h2v(key, vkey, [value])
201 | - transform$uniqkv(key, vkey, [value])
202 | 
203 | ## Time window functions
204 | 
205 | - time_window(time_period, [dt, [offset]])
--------------------------------------------------------------------------------
/docs/使用教程/3、类型注解和类型转换.md:
--------------------------------------------------------------------------------
1 | # Type annotations and type conversion in syncany-SQL, a SQL execution engine with multi-source federated queries
2 | 
3 | As a SQL execution engine, syncany-SQL does not require schema information to be defined in advance. It supports many data sources, and quite a few of them are schemaless by nature, for example the NoSQL database MongoDB. By default, queries and computations simply use the types of the input data as-is, so a query may fail or a computation may misbehave when data types do not match. In some cases we therefore need to add type annotations manually or apply type conversion functions when writing SQL.
4 | 
5 | For example, a MongoDB ObjectId stored in MySQL is usually converted to a string and stored as varchar. When joining MongoDB with MySQL, the varchar value stored in MySQL must be converted back to a MongoDB ObjectId before the related data can be loaded correctly from MongoDB.
6 | 
7 | syncany-SQL supports two ways of converting data types:
8 | - type annotations
9 | - type conversion functions
10 | 
11 | The examples below assume the following data structures:
12 | 
13 | ```SQL
14 | # The users table in the mysql_test database in MySQL
15 | CREATE TABLE `mysql_test`.`users` (
16 |   `user_id` bigint(20) NOT NULL,
17 |   `name` varchar(64),
18 |   `birthday` varchar(20),
19 |   `vip_id` varchar(24),
20 |   PRIMARY KEY (`user_id`) USING BTREE
21 | );
22 | ```
23 | ```BSON
24 | #
Data in the vips collection of the mongo_test database in MongoDB
25 | [{
26 |     "_id": ObjectId("640a9786bb450457c544f759"),
27 |     "vip_name" : "超级VIP",
28 |     "create_time" : ISODate("2023-03-10T02:35:50.298Z")
29 | }]
30 | ```
31 | 
32 | 
33 | 
34 | ### Using type annotations
35 | 
36 | Add a type annotation to a query field and, after the data is loaded, the value is automatically converted to that type before it takes part in subsequent queries and computations.
37 | 
38 | Syntax: ``` `field_name[type]` ```
39 | 
40 | For example:
41 | 
42 | ```SQL
43 | # A join query
44 | select a.`user_id`, a.`name`, a.`birthday[date]`, b.`_id[text]` as vip_id, b.`vip_name` from `mysql_test`.`users` a left join `mongo_test`.`vips` b on a.`vip_id[objectid]`=b.`_id`;
45 | ```
46 | 
47 | In this SQL we add a date type annotation to the birthday field of the users table, meaning the string date stored in the MySQL table should be converted to the date type.
48 | 
49 | The vip_id field stored in the MySQL table holds the value of the primary key _id of the vips collection in MongoDB. When joining users with vips we add an objectid type annotation to users.vip_id, so the value is converted to an ObjectId when the related data is queried from MongoDB and the related rows load correctly.
50 | 
51 | After the data has been loaded from vips, we use another type annotation to convert the _id field from ObjectId to string so that the query result can be output.
52 | 
53 | Note: type annotations can only be added to query fields, the field must be wrapped in backticks, and they cannot be applied to an alias after as or to a function return value.
54 | 
55 | 
56 | 
57 | 
58 | Supported annotation types:
59 | 
60 | - Integers: int, tinyint, smallint, mediumint, bigint
61 | - Floating point numbers: float, double
62 | - Strings: str, tinytext, mediumtext, text, char, varchar, nchar
63 | - Byte arrays: bytes, binary, varbinary, blob
64 | - Booleans: bool, boolean
65 | - Decimal: decimal
66 | - BSON ObjectId: objectid
67 | - UUID: uuid
68 | - Datetimes: datetime, timestamp
69 | - Dates: date
70 | - Times: time
71 | - Arrays: array
72 | - Sets: set
73 | - HashMap: map
74 | 
75 | 
76 | 
77 | ### Using type conversion functions
78 | 
79 | Type annotations can only be attached to query fields, while type conversion functions can be used on query fields or on function return values, which is more convenient. When the built-in functions do not cover a conversion, you can also write and load custom functions to perform it, so queries and computations complete correctly.
80 | 
81 | For example:
82 | 
83 | ```SQL
84 | # A join query
85 | select a.`user_id`, a.`name`, convert_date(a.`birthday`) as birthday, convert_string(b.`_id`) as vip_id, b.`vip_name` from `mysql_test`.`users` a left join `mongo_test`.`vips` b on convert_objectid(a.`vip_id`)=b.`_id`;
86 | ```
87 | 
88 | Type conversion functions achieve the same result.
89 | 
90 | 
91 | Built-in type conversion functions:
92 | 
93 | - To integer: convert_int(expr)
94 | - To float: convert_float(expr)
95 | - To string: convert_string(expr)
96 | - To byte array: convert_bytes(expr)
97 | - To boolean: convert_bool(expr)
98 | - To Decimal: convert_decimal(expr)
99 | - To BSON ObjectId: convert_objectid(expr)
100 | - To UUID: convert_uuid(expr)
101 | - To datetime: convert_datetime(expr)
102 | - To date: convert_date(expr)
103 | - To time: convert_time(expr)
104 | - To array: convert_array(expr)
105 | - To set: convert_set(expr)
106 | - To HashMap: convert_map(expr)
107 | 
108 | 
109 | 
110 | 
111 | ### A note on optimizing join on conditions
112 | 
113 | When writing a join, the on condition must be able to query data correctly from the joined table, and different data sources behave differently: SQL databases such as MySQL convert types automatically, while MongoDB requires a manual conversion to keep the query condition types consistent. The converted field should therefore not be on the table from which the related data is read.
114 | 
115 | For instance, suppose we rewrite the example above as the following SQL:
116 | 
117 | ```SQL
118 | # A join query
119 | select a.`user_id`, a.`name`, convert_date(a.`birthday`) as birthday, convert_string(b.`_id`) as vip_id, b.`vip_name` from `mysql_test`.`users` a left join `mongo_test`.`vips` b on a.`vip_id`=convert_string(b.`_id`);
120 | ```
121 | 
122 | This SQL still runs, but because the join condition applies a manual type conversion to the _id field of the joined vips collection, the whole collection is loaded and matched in memory rather than reading just the matching rows with a MongoDB query. That is very inefficient and may fail by exhausting resources.
123 | 
124 | 
--------------------------------------------------------------------------------
/examples/README.md:
--------------------------------------------------------------------------------
1 | # Examples
2 | 
3 | ## [mathematical](mathematical)
4 | 
5 | Numbers and mathematical computation
6 | 
7 | ## [strings](strings)
8 | 
9 | String handling
10 | 
11 | ## [parameter_variable](parameter_variable)
12 | 
13 | Dynamic parameter variables
14 | 
15 | ## [type_annotation](type_annotation)
16 | 
17 | Type annotations; when joining across databases the types must match, e.g. Mongo's primary key _id is usually stored as varchar in MySQL, so join queries need a type conversion to keep the types consistent
18 | 
19 | ## [get_value](get_value)
20 | 
21 | Reading values from complex data structures
22 | 
23 | ## [yield_data](yield_data)
24 | 
25 | Expanding arrays
26 | 
27 | ## [import_python](import_python)
28 | 
29 | Importing Python modules to call custom functions
30 | 
31 | ## [memory_temporary_storage](memory_temporary_storage)
32 | 
33 | Storing intermediate results in memory
34 | 
35 | ## [json](json)
36 | 
37 | JSON functions
38 | 
39 | ## [aggregate](aggregate)
40 | 
41 | Aggregate computation
42 | 
43 | ## [transform](transform)
44 | 
45 | Transform data conversion (pivoting between row and column layouts)
46 | 
47 | ## [raw_query](raw_query)
48 | 
49 | Raw queries can be embedded in SQL to take full advantage of database features and performance
50 | 
51 | ## [insert_types](insert_types)
52 | 
53 | insert into with an explicit merge type (insert I; update otherwise insert UI; update otherwise insert, delete the rest UDI; delete then insert DI)
54 | 
55 | ## [demo](demo)
56 | 
57 | Querying JSON files to total the order amount per site
58 | 
59 | ## [nginx-log](nginx-log)
60 | 
61 | Querying an Nginx log file for the top 3 IPs by request count
--------------------------------------------------------------------------------
/examples/aggregate/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/aggregate/README.md
--------------------------------------------------------------------------------
/examples/aggregate/aggregate.sql:
--------------------------------------------------------------------------------
1 | 
2 | use `aggregate_customize`;
3 | 
4 | 
5 | select count(*) as cnt, sum(amount) as total_amount, avg(amount) as avg_amount, min(amount) as min_amount, max(amount) as max_amount from `data/orders.json` where status=0;
6 | 
7 | select uid, count(*) as cnt, sum(amount) as total_amount, avg(amount) as avg_amount, min(amount) as min_amount, max(amount) as max_amount from `data/orders.json` where status=0 group by uid;
8 | 
9 | select distinct uid, order_id, amount from `data/orders.json` where status=0;
10 | 
11 | select count(distinct uid) as ucnt from `data/orders.json` where status=0;
12 | 
13 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid;
14 | 
15 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid having uid=1;
16 | 
17 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid having gcnt>1;
18 | 
19 | select uid, sum(amount) / count(*) / 100 as avg_amount from `data/orders.json` where status=0 group by uid;
20 | 
21 | select b.name, c.goods_name, count(*) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid
22 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0;
23 | 
24 | select b.name, c.goods_name, count(*) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid
25 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by b.name, c.goods_name;
26 | 
27 | select distinct b.name, c.goods_name from `data/orders.json` a join `data/users.json` b on a.uid=b.uid
28 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0;
29 | 
30 | select b.name, c.goods_name, count(distinct b.name) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid
31 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by c.goods_name;
32 | 
33 | select uid, group_concat(goods_id) as cgoods_ids, group_array(goods_id) as agoods_ids, group_uniq_array(goods_id) as uagoods_ids from `data/orders.json` where status=0 group by uid;
34 | 
35 | select uid, group_bit_and(goods_id) as goods_id_and, group_bit_or(goods_id) as goods_id_or, group_bit_xor(goods_id) as goods_id_xor from `data/orders.json` where status=0 group by uid;
36 | 
37 | -- Custom aggregate functions
38 | select aggregate_unique(uid) as uids, aggregate_join(order_id) as
order_ids from `data/orders.json` where status=0; 39 | 40 | select uid, aggregate_unique(goods_id) as goods_ids, aggregate_join(order_id) as order_ids from `data/orders.json` where status=0 group by uid; 41 | 42 | select uid, length(aggregate_join(order_id)) / 100 as avg_amount, (length(aggregate_join(order_id)) / count(*) + 1) / 100 as percent from `data/orders.json` where status=0 group by uid; 43 | 44 | select b.name, c.goods_name, aggregate_unique(b.name) as names, aggregate_join(c.goods_name) as goods_namees from `data/orders.json` a 45 | join `data/users.json` b on a.uid=b.uid 46 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0; 47 | 48 | select b.name, c.goods_name, aggregate_unique(b.name) as names, aggregate_join(c.goods_name) as goods_namees from `data/orders.json` a 49 | join `data/users.json` b on a.uid=b.uid 50 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by b.name, c.goods_name; 51 | 52 | select uid, json_arrayagg(goods_id) as agoods_ids, json_objectagg(goods_id, order_id) as ogoods_ids from `data/orders.json` where status=0 group by uid; -------------------------------------------------------------------------------- /examples/aggregate/aggregate_batch.sql: -------------------------------------------------------------------------------- 1 | 2 | use `aggregate_customize`; 3 | set @batch=2; 4 | 5 | select count(*) as cnt, sum(amount) as total_amount, avg(amount) as avg_amount, min(amount) as min_amount, max(amount) as max_amount from `data/orders.json` where status=0; 6 | 7 | select uid, count(*) as cnt, sum(amount) as total_amount, avg(amount) as avg_amount, min(amount) as min_amount, max(amount) as max_amount from `data/orders.json` where status=0 group by uid; 8 | 9 | select distinct uid, order_id, amount from `data/orders.json` where status=0; 10 | 11 | select count(distinct uid) as ucnt from `data/orders.json` where status=0; 12 | 13 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid; 14 | 15 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid having uid=1; 16 | 17 | select uid, count(distinct goods_id) as gcnt from `data/orders.json` where status=0 group by uid having gcnt>1; 18 | 19 | select uid, sum(amount) / count(*) / 100 as avg_amount from `data/orders.json` where status=0 group by uid; 20 | 21 | select b.name, c.goods_name, count(*) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid 22 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0; 23 | 24 | select b.name, c.goods_name, count(*) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid 25 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by b.name, c.goods_name; 26 | 27 | select distinct b.name, c.goods_name from `data/orders.json` a join `data/users.json` b on a.uid=b.uid 28 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0; 29 | 30 | select b.name, c.goods_name, count(distinct b.name) as cnt from `data/orders.json` a join `data/users.json` b on a.uid=b.uid 31 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by c.goods_name; 32 | 33 | select uid, group_concat(goods_id) as cgoods_ids, group_array(goods_id) as agoods_ids, group_uniq_array(goods_id) as uagoods_ids from `data/orders.json` where status=0 group by uid; 34 | 35 | select uid, group_bit_and(goods_id) as goods_id_and, group_bit_or(goods_id) as goods_id_or, 
group_bit_xor(goods_id) as goods_id_xor from `data/orders.json` where status=0 group by uid;
36 | 
37 | -- Custom aggregate functions
38 | select aggregate_unique(uid) as uids, aggregate_join(order_id) as order_ids from `data/orders.json` where status=0;
39 | 
40 | select uid, aggregate_unique(goods_id) as goods_ids, aggregate_join(order_id) as order_ids from `data/orders.json` where status=0 group by uid;
41 | 
42 | select uid, length(aggregate_join(order_id)) / 100 as avg_amount from `data/orders.json` where status=0 group by uid;
43 | 
44 | select b.name, c.goods_name, aggregate_unique(b.name) as names, aggregate_join(c.goods_name) as goods_namees from `data/orders.json` a
45 | join `data/users.json` b on a.uid=b.uid
46 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0;
47 | 
48 | select b.name, c.goods_name, aggregate_unique(b.name) as names, aggregate_join(c.goods_name) as goods_namees from `data/orders.json` a
49 | join `data/users.json` b on a.uid=b.uid
50 | join `data/goodses.json` c on a.goods_id=c.goods_id where a.status=0 group by b.name, c.goods_name;
--------------------------------------------------------------------------------
/examples/aggregate/aggregate_customize.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 2023/4/25
3 | # create by: snower
4 | 
5 | from syncanysql.calculaters import StateAggregateCalculater, register_calculater
6 | 
7 | 
8 | @register_calculater("aggregate_unique")
9 | class UniqueSetAggregateCalculater(StateAggregateCalculater):
10 |     def aggregate(self, state_value, data_value):
11 |         if data_value is None:
12 |             return state_value
13 |         if state_value is None:
14 |             return {data_value}
15 |         state_value.add(data_value)
16 |         return state_value
17 | 
18 |     def reduce(self, state_value, data_value):
19 |         if data_value is None:
20 |             return state_value
21 |         if state_value is None:
22 |             return data_value
23 |         return state_value | data_value
24 | 
25 |     def final_value(self, state_value):
26 |         return state_value
27 | 
28 | 
29 | @register_calculater("aggregate_join")
30 | class JoinArrayAggregateCalculater(StateAggregateCalculater):
31 |     def aggregate(self, state_value, data_value):
32 |         if data_value is None:
33 |             return state_value
34 |         if state_value is None:
35 |             return [str(data_value)]
36 |         state_value.append(str(data_value))
37 |         return state_value
38 | 
39 |     def reduce(self, state_value, data_value):
40 |         if data_value is None:
41 |             return state_value
42 |         if state_value is None:
43 |             return data_value
44 |         return state_value + data_value
45 | 
46 |     def final_value(self, state_value):
47 |         if not state_value:
48 |             return ""
49 |         return ",".join(state_value)
50 | 
--------------------------------------------------------------------------------
/examples/aggregate/data/goodses.json:
--------------------------------------------------------------------------------
1 | [
2 |   {
3 |     "goods_id": 1,
4 |     "goods_name": "青菜"
5 |   },
6 |   {
7 |     "goods_id": 2,
8 |     "goods_name": "白菜"
9 |   }
10 | ]
--------------------------------------------------------------------------------
/examples/aggregate/data/order_historys.json:
--------------------------------------------------------------------------------
1 | [
2 |   {
3 |     "id": 1,
4 |     "order_id": 1,
5 |     "history_type": 1,
6 |     "uid": 2,
7 |     "goods_id": 1,
8 |     "amount": 9.6,
9 |     "status": 0,
10 |     "create_time": "2024-10-01 10:09:10"
11 |   },
12 |   {
13 |     "id": 2,
14 |     "order_id": 1,
15 |     "history_type": 0,
16 |     "uid": 2,
17 |     "goods_id": 1,
18 |     "amount": 5.2,
19 |     "status": 0,
"status": 0, 20 | "create_time": "2024-10-02 14:09:10" 21 | }, 22 | { 23 | "id": 3, 24 | "order_id": 2, 25 | "history_type": 1, 26 | "uid": 1, 27 | "goods_id": 1, 28 | "amount": 7.6, 29 | "status": 0, 30 | "create_time": "2024-10-01 12:09:10" 31 | }, 32 | { 33 | "id": 4, 34 | "order_id": 1, 35 | "history_type": 1, 36 | "uid": 2, 37 | "goods_id": 1, 38 | "amount": 19.1, 39 | "status": 0, 40 | "create_time": "2024-10-06 11:09:10" 41 | }, 42 | { 43 | "id": 5, 44 | "order_id": 3, 45 | "history_type": 1, 46 | "uid": 2, 47 | "goods_id": 2, 48 | "amount": 3, 49 | "status": 0, 50 | "create_time": "2024-10-02 09:09:10" 51 | }, 52 | { 53 | "id": 6, 54 | "order_id": 3, 55 | "history_type": 0, 56 | "uid": 2, 57 | "goods_id": 2, 58 | "amount": 3.3, 59 | "status": 0, 60 | "create_time": "2024-10-04 09:09:10" 61 | }, 62 | { 63 | "id": 7, 64 | "order_id": 3, 65 | "history_type": 1, 66 | "uid": 2, 67 | "goods_id": 2, 68 | "amount": 4.3, 69 | "status": 0, 70 | "create_time": "2024-10-08 09:09:10" 71 | }, 72 | { 73 | "id": 8, 74 | "order_id": 4, 75 | "history_type": 1, 76 | "uid": 1, 77 | "goods_id": 1, 78 | "amount": 8, 79 | "status": 0, 80 | "create_time": "2024-10-01 10:09:10" 81 | }, 82 | { 83 | "id": 9, 84 | "order_id": 3, 85 | "history_type": 0, 86 | "uid": 2, 87 | "goods_id": 7.12, 88 | "amount": 3, 89 | "status": 0, 90 | "create_time": "2024-10-11 09:09:10" 91 | }, 92 | { 93 | "id": 10, 94 | "order_id": 3, 95 | "history_type": 0, 96 | "uid": 2, 97 | "goods_id": 2, 98 | "amount": 31.1, 99 | "status": 0, 100 | "create_time": "2024-10-14 19:09:10" 101 | }, 102 | { 103 | "id": 11, 104 | "order_id": 5, 105 | "history_type": 1, 106 | "uid": 2, 107 | "goods_id": 1, 108 | "amount": 8, 109 | "status": 0, 110 | "create_time": "2024-10-01 10:09:10" 111 | }, 112 | { 113 | "id": 12, 114 | "order_id": 6, 115 | "history_type": 1, 116 | "uid": 2, 117 | "goods_id": 2, 118 | "amount": 7, 119 | "status": 0, 120 | "create_time": "2024-10-01 10:09:10" 121 | }, 122 | { 123 | "id": 13, 124 | "order_id": 5, 125 | "history_type": 0, 126 | "uid": 2, 127 | "goods_id": 1, 128 | "amount": 18.9, 129 | "status": 0, 130 | "create_time": "2024-10-04 12:09:10" 131 | }, 132 | { 133 | "id": 14, 134 | "order_id": 5, 135 | "history_type": 1, 136 | "uid": 2, 137 | "goods_id": 1, 138 | "amount": 81.9, 139 | "status": 0, 140 | "create_time": "2024-10-06 15:09:10" 141 | } 142 | ] -------------------------------------------------------------------------------- /examples/aggregate/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 2, 5 | "goods_id": 1, 6 | "amount": 9.6, 7 | "status": 0 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 1, 12 | "goods_id": 1, 13 | "amount": 7.6, 14 | "status": 0 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 2, 19 | "goods_id": 2, 20 | "amount": 3, 21 | "status": 0 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 1, 26 | "goods_id": 1, 27 | "amount": 8, 28 | "status": 0 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "goods_id": 1, 34 | "amount": 8, 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "uid": 2, 40 | "goods_id": 2, 41 | "amount": 7, 42 | "status": 0 43 | } 44 | ] -------------------------------------------------------------------------------- /examples/aggregate/data/users.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "name": "王五" 5 | }, 6 | { 7 | "uid": 2, 8 | "name": "李四" 9 | } 10 | ] 
-------------------------------------------------------------------------------- /examples/aggregate/window_aggregate.sql: -------------------------------------------------------------------------------- 1 | SELECT a.order_id, b.name, c.goods_name, count(order_id) as cnt, sum(a.amount) as total_amount, a.first_create_time AS order_time, a.next_create_time AS unorder_time 2 | FROM 3 | (SELECT oha.`id`, oha.`order_id`, oha.`history_type`, oha.`uid`, oha.`goods_id`, oha.`amount`, oha.`status`, oha.`create_time`, 4 | LEAD(oha.history_type) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_history_type, 5 | LEAD(oha.create_time) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_create_time, 6 | IFNULL(MAX(ohb.create_time), oha.create_time) AS first_create_time 7 | FROM `data/order_historys.json` oha 8 | LEFT JOIN `data/order_historys.json` ohb ON ohb.order_id=oha.order_id AND ohb.create_time<=oha.create_time AND ohb.history_type>0 9 | GROUP BY oha.id ) a 10 | LEFT JOIN `data/users.json` b on a.uid=b.uid 11 | LEFT JOIN `data/goodses.json` c on a.goods_id=c.goods_id 12 | WHERE (a.next_history_type>0 OR a.next_history_type is null) group by order_id ORDER BY order_id; 13 | 14 | select oha.`id`, oha.`order_id`,oha.`history_type`, 15 | LEAD(oha.history_type) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_history_type, 16 | LEAD(oha.create_time) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_create_time, 17 | IFNULL(MAX(ohb.create_time), oha.create_time) AS first_create_time 18 | FROM `data/order_historys.json` oha 19 | LEFT JOIN `data/order_historys.json` ohb ON ohb.order_id=oha.order_id AND ohb.create_time<=oha.create_time AND ohb.history_type>0 20 | Where order_id=1 group by oha.id; 21 | 22 | select oha.`id`, oha.`order_id`,oha.`history_type`, count(oha.order_id) as cnt, sum(oha.amount) as total_amount, 23 | LEAD(oha.history_type) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_history_type, 24 | LEAD(oha.create_time) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_create_time 25 | FROM `data/order_historys.json` oha group by oha.order_id order by order_id; 26 | 27 | SELECT a.order_id, b.name, c.goods_name, count(order_id) as cnt, sum(a.amount) as total_amount, a.first_create_time AS order_time, a.next_create_time AS unorder_time 28 | FROM 29 | (SELECT oha.`id`, oha.`order_id`, oha.`history_type`, oha.`uid`, oha.`goods_id`, oha.`amount`, oha.`status`, oha.`create_time`, 30 | LEAD(oha.history_type) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_history_type, 31 | LEAD(oha.create_time) OVER (PARTITION BY oha.order_id ORDER BY oha.create_time) AS next_create_time, 32 | IFNULL(MAX(ohb.create_time), oha.create_time) AS first_create_time 33 | FROM `data/order_historys.json` oha 34 | LEFT JOIN `data/order_historys.json` ohb ON ohb.order_id=oha.order_id AND ohb.create_time<=oha.create_time AND ohb.history_type>0 35 | GROUP BY oha.id ) a 36 | LEFT JOIN `data/users.json` b on a.uid=b.uid 37 | LEFT JOIN `data/goodses.json` c on a.goods_id=c.goods_id 38 | WHERE (a.next_history_type>0 OR a.next_history_type is null) group by order_id having cnt>1 ORDER BY order_id; -------------------------------------------------------------------------------- /examples/datetime/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/datetime/README.md 
--------------------------------------------------------------------------------
/examples/datetime/datetime.sql:
--------------------------------------------------------------------------------
1 | 
2 | select NOW();
3 | 
4 | select NOW(0), NOW('-1d'), NOW('+3d'), NOW('-3d', 0), NOW('-3d', 0, 10, 11);
5 | 
6 | select DATE_ADD(now(), 1), ADDDATE(now(), INTERVAL 20 DAY), DATE_SUB(now(), INTERVAL 10 DAY), SUBDATE(now(), INTERVAL 7 MONTH);
7 | 
8 | select ADDTIME(now(), '10:00'), SUBTIME(now(), '1 10:00');
9 | 
10 | select DATE_FORMAT(datetime('2023-04-24 17:07:08'), '%Y-%m-%d %H:%M:%S'), TIME_FORMAT(datetime('2023-04-24 17:07:08'), '%H:%M:%S'), TIME_TO_SEC('10:11:00'), SEC_TO_TIME(234);
11 | 
12 | select CURDATE(), CURRENT_DATE(), CURRENT_TIME(), CURTIME();
13 | 
14 | select FROM_UNIXTIME(1677833819), UNIX_TIMESTAMP(), UNIX_TIMESTAMP(now()), CURRENT_TIMESTAMP();
15 | 
16 | select UTC_DATE(), UTC_TIME(), UTC_TIMESTAMP();
17 | 
18 | select date(now()), datetime(now()), time(now()), datetime(date(now())), datetime(time(now())), date(time(now())), time(date(now()));
19 | 
20 | select DATE_FORMAT(convert_datetime('1600-01-01'), '%Y-%m-%d %H:%M:%S'), DATE_FORMAT(convert_datetime('1900-01-01 10:12:23'), '%Y-%m-%d %H:%M:%S'), TIME_FORMAT(convert_datetime('1800-01-01 10:12:23'), '%H:%M:%S');
--------------------------------------------------------------------------------
/examples/demo/README.md:
--------------------------------------------------------------------------------
1 | # Run
2 | 
3 | ```bash
4 | syncany-sql demo.sql
5 | ```
--------------------------------------------------------------------------------
/examples/demo/data/demo.json:
--------------------------------------------------------------------------------
1 | [
2 |   {
3 |     "_id": "60015607fc1d0000e70006c6",
4 |     "start_date": "2021-01-10",
5 |     "vip_type": "1",
6 |     "rules": [
7 |       {
8 |         "id": 1,
9 |         "timeout_time": "11:00:00"
10 |       }
11 |     ],
12 |     "sites": [
13 |       8,
14 |       15,
15 |       21,
16 |       26
17 |     ],
18 |     "created_at": "2021-01-15 16:44:55"
19 |   },
20 |   {
21 |     "_id": "60015625fc1d0000e70006ce",
22 |     "start_date": "2021-01-16",
23 |     "vip_type": "2",
24 |     "rules": [
25 |       {
26 |         "id": 1,
27 |         "timeout_time": "16:00:00"
28 |       }
29 |     ],
30 |     "sites": [
31 |       8,
32 |       21,
33 |       26
34 |     ],
35 |     "created_at": "2021-01-30 15:00:48"
36 |   },
37 |   {
38 |     "_id": "600fcec9a2ca7d61d033e729",
39 |     "start_date": "2021-02-10",
40 |     "vip_type": "1",
41 |     "rules": [
42 |       {
43 |         "id": 1,
44 |         "timeout_time": "10:00:00"
45 |       }
46 |     ],
47 |     "sites": [
48 |       15,
49 |       28
50 |     ],
51 |     "created_at": "2021-01-26 16:11:53"
52 |   },
53 |   {
54 |     "_id": "600fcee0d09e444f73340338",
55 |     "start_date": "2021-02-14",
56 |     "vip_type": "2",
57 |     "rules": [
58 |       {
59 |         "id": 1,
60 |         "timeout_time": "15:00:00"
61 |       }
62 |     ],
63 |     "sites": [
64 |       15,
65 |       28
66 |     ],
67 |     "created_at": "2021-01-26 16:12:16"
68 |   },
69 |   {
70 |     "_id": "6038a00a8c211a13af3cf0ba",
71 |     "start_date": "2021-01-24",
72 |     "vip_type": "1",
73 |     "rules": [
74 |       {
75 |         "id": 1,
76 |         "timeout_time": "11:20:00"
77 |       }
78 |     ],
79 |     "sites": [
80 |       34
81 |     ],
82 |     "created_at": "2021-02-26 15:15:22"
83 |   },
84 |   {
85 |     "_id": "6038a01749ca705465706963",
86 |     "start_date": "2021-02-12",
87 |     "vip_type": "2",
88 |     "rules": [
89 |       {
90 |         "id": 1,
91 |         "timeout_time": "16:40:00"
92 |       }
93 |     ],
94 |     "sites": [
95 |       34
96 |     ],
97 |     "created_at": "2021-02-26 15:15:35"
98 |   }
99 | ]
--------------------------------------------------------------------------------
/examples/demo/data/orders.json:
-------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "site_id": 8, 5 | "amount": 10, 6 | "status": 0 7 | }, 8 | { 9 | "order_id": 2, 10 | "site_id": 15, 11 | "amount": 7.2, 12 | "status": 0 13 | }, 14 | { 15 | "order_id": 3, 16 | "site_id": 8, 17 | "amount": 2.8, 18 | "status": 1 19 | }, 20 | { 21 | "order_id": 4, 22 | "site_id": 28, 23 | "amount": 4.7, 24 | "status": 1 25 | }, 26 | { 27 | "order_id": 5, 28 | "site_id": 8, 29 | "amount": 3.5, 30 | "status": 0 31 | }, 32 | { 33 | "order_id": 6, 34 | "site_id": 34, 35 | "amount": 11.2, 36 | "status": 0 37 | }, 38 | { 39 | "order_id": 7, 40 | "site_id": 8, 41 | "amount": 3.54, 42 | "status": 0 43 | } 44 | ] -------------------------------------------------------------------------------- /examples/demo/data/sites.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "site_id": 8, 4 | "name": "黄豆网", 5 | "status": 0 6 | }, 7 | { 8 | "site_id": 15, 9 | "name": "青菜网", 10 | "status": 0 11 | }, 12 | { 13 | "site_id": 21, 14 | "name": "去啥网", 15 | "status": 0 16 | }, 17 | { 18 | "site_id": 26, 19 | "name": "汽车网", 20 | "status": 0 21 | }, 22 | { 23 | "site_id": 28, 24 | "name": "火箭网", 25 | "status": 0 26 | }, 27 | { 28 | "site_id": 34, 29 | "name": "卫星网", 30 | "status": 0 31 | } 32 | ] -------------------------------------------------------------------------------- /examples/demo/demo.sql: -------------------------------------------------------------------------------- 1 | #!/bin/env syncany-sql 2 | 3 | SELECT 4 | a.site_id, 5 | b.name AS site_name, 6 | IF(c.site_amount > 0, c.site_amount, 0) AS site_amount, 7 | MAX(a.timeout_at) AS timeout_at, 8 | MAX(a.vip_timeout_at) AS vip_timeout_at 9 | FROM 10 | (SELECT 11 | YIELD_ARRAY(sites) AS site_id, 12 | IF(vip_type = '2', GET_VALUE(rules, 0, 'timeout_time'), '') AS timeout_at, 13 | IF(vip_type = '1', GET_VALUE(rules, 0, 'timeout_time'), '') AS vip_timeout_at 14 | FROM 15 | `data/demo.json` 16 | WHERE 17 | start_date >= '2021-01-01') a 18 | JOIN 19 | `data/sites.json` b ON a.site_id = b.site_id 20 | JOIN 21 | (SELECT 22 | site_id, SUM(amount) AS site_amount 23 | FROM 24 | `data/orders.json` 25 | WHERE 26 | status <= 0 27 | GROUP BY site_id) c ON a.site_id = c.site_id 28 | GROUP BY a.site_id; 29 | -------------------------------------------------------------------------------- /examples/demo/demo2.sql: -------------------------------------------------------------------------------- 1 | 2 | SELECT yield_array(a.sites) as site_id, b.name as site_name FROM `data/demo.json` a 3 | JOIN `data/sites.json` b ON a.sites = b.site_id; 4 | 5 | select * from `data/orders.json` order by amount desc limit 2; 6 | 7 | select a.order_id, a.site_id, a.amount * 100 as amount from `data/orders.json` a order by site_id desc, a.order_id desc limit 2; 8 | 9 | select a.order_id, a.site_id, a.amount * 100 as amount from `data/orders.json` a order by a.amount * 100 desc limit 2; 10 | 11 | execute `execute.sql`; 12 | 13 | execute `json/demo.json`; 14 | 15 | select site_id, site_name, site_amount, timeout_at, vip_timeout_at from execute_demo_sql_data; 16 | 17 | select site_id, site_name, site_amount, timeout_at, vip_timeout_at from execute_demo_json_data; -------------------------------------------------------------------------------- /examples/demo/execute.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO `execute_demo_sql_data` SELECT 2 | a.site_id, 3 | b.name AS site_name, 
4 | IF(c.site_amount > 0, c.site_amount, 0) AS site_amount, 5 | MAX(a.timeout_at) AS timeout_at, 6 | MAX(a.vip_timeout_at) AS vip_timeout_at 7 | FROM 8 | (SELECT 9 | YIELD_ARRAY(sites) AS site_id, 10 | IF(vip_type = '2', GET_VALUE(rules, 0, 'timeout_time'), '') AS timeout_at, 11 | IF(vip_type = '1', GET_VALUE(rules, 0, 'timeout_time'), '') AS vip_timeout_at 12 | FROM 13 | `data/demo.json` 14 | WHERE 15 | start_date >= '2021-01-01') a 16 | JOIN 17 | `data/sites.json` b ON a.site_id = b.site_id 18 | JOIN 19 | (SELECT 20 | site_id, SUM(amount) AS site_amount 21 | FROM 22 | `data/orders.json` 23 | WHERE 24 | status <= 0 25 | GROUP BY site_id) c ON a.site_id = c.site_id 26 | GROUP BY a.site_id; -------------------------------------------------------------------------------- /examples/demo/json/database.json: -------------------------------------------------------------------------------- 1 | { 2 | "databases": [ 3 | { 4 | "name": "stdio", 5 | "driver": "textline" 6 | }, 7 | { 8 | "name": "data", 9 | "driver": "json", 10 | "path": "./data" 11 | }, 12 | { 13 | "name": "--", 14 | "driver": "memory" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /examples/demo/json/demo.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["json/database.json", "json/log.json"], 3 | "name": "demo", 4 | "input": "<<&.data.demo.json::_id", 5 | "output": ">>&.--.execute_demo_json_data::site_id use I", 6 | "caches": [{ 7 | "name": "site_amount", 8 | "database": "--", 9 | "prefix_key": "site:amount", 10 | "exprie_seconds": 300 11 | }], 12 | "querys": { 13 | "start_date": {">=": "2021-01-01"} 14 | }, 15 | "schema": { 16 | "site_id": ["#yield", "$.sites", [ 17 | ":#aggregate", "$.*|int", "$$.*|int" 18 | ]], 19 | "site_name": ["#yield", "$.sites", [ 20 | ":#aggregate", "$.*|int", [ 21 | "$$.*|int", ["&.data.sites.json::site_id", {"status|int": {">=": 0}}], ":$.name" 22 | ] 23 | ]], 24 | "site_amount": ["#yield", "$.sites", [ 25 | ":#aggregate", "$.*|int", [ 26 | "#cache", "site_amount", "$$.*|int", [ 27 | "$$.*|int", "&.data.orders.json::site_id", [ 28 | ":#foreach|int", "$.*|array", [ 29 | "#if", ["@lte", "$.status", 0], ["#make", {"value": "$.amount"}], "#continue" 30 | ], [ 31 | ":@sum", "$.*|array", "value" 32 | ] 33 | ] 34 | ] 35 | ] 36 | ]], 37 | "timeout_at": ["#yield", "$.sites", [ 38 | ":#aggregate", "$.*|int", { 39 | "#case": "$$$.vip_type", 40 | "1": "$.timeout_at", 41 | "#end": "$$$.rules.:0.timeout_time" 42 | } 43 | ]], 44 | "vip_timeout_at": ["#yield", "$.sites", [ 45 | ":#aggregate", "$.*|int", { 46 | "#match": "$$$.vip_type", 47 | "/2/": "$$.vip_timeout_at", 48 | "#end": "$$$.rules.:0.timeout_time" 49 | } 50 | ]], 51 | "created_at?": "@now" 52 | } 53 | } -------------------------------------------------------------------------------- /examples/demo/json/log.json: -------------------------------------------------------------------------------- 1 | { 2 | "logger": { 3 | "version": 1, 4 | "formatters": { 5 | "console": { 6 | "fmt": "%(asctime)s %(process)d %(levelname)s %(message)s", 7 | "datefmt": null, 8 | "()": "logging.Formatter" 9 | } 10 | }, 11 | "handlers": { 12 | "console": { 13 | "level": "DEBUG", 14 | "class": "logging.StreamHandler", 15 | "formatter": "console" 16 | } 17 | }, 18 | "loggers": { 19 | "": { 20 | "handlers": [ 21 | "console" 22 | ], 23 | "level": "DEBUG" 24 | } 25 | } 26 | } 27 | } -------------------------------------------------------------------------------- 
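For readers decoding the schema DSL in json/demo.json above: as a rough correspondence (a sketch, not a repository file), a "#yield" entry paired with ":#aggregate" plays the role that YIELD_ARRAY plus GROUP BY plays in the SQL form of the same pipeline, e.g. (cnt is just an illustrative aggregate):

SELECT YIELD_ARRAY(sites) AS site_id, COUNT(*) AS cnt
FROM `data/demo.json`
GROUP BY site_id;

demo2.sql runs both forms (execute `execute.sql`; execute `json/demo.json`;) and reads back execute_demo_sql_data and execute_demo_json_data, which makes it easy to check that the two stay equivalent.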
/examples/functions/generate.sql: -------------------------------------------------------------------------------- 1 | 2 | select objectid() as a, objectid('65bb4211eda4fed2e199073e') as b, uuid$uuid4() as c, uuid('54aa0a5c-b54f-4628-8391-3756007d5fc3') as d, snowflakeid() as e, snowflakeid(0) as f; 3 | 4 | select random() as a, random$int(0, 10000) as b, random$string(10) as c, random$hexs(10) as d, random$letters(10) as e, random$digits(10) as f, random$prints(10) as g, random$bytes(10) as h; 5 | 6 | select random$choice(1, 2, 3, 4) as a; 7 | -------------------------------------------------------------------------------- /examples/get_value/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/get_value/README.md -------------------------------------------------------------------------------- /examples/get_value/data/data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "userId": 123, 4 | "order": { 5 | "orderId": 3243243, 6 | "goodses": [ 7 | { 8 | "goodsId": "G124344", 9 | "goodsName": "商品1" 10 | }, 11 | { 12 | "goodsId": "G124345", 13 | "goodsName": "商品2" 14 | } 15 | ], 16 | "payInfo": { 17 | "payId": "23232323232323", 18 | "payTypes": [1, 3, 4], 19 | "amount": 12.9 20 | }, 21 | "channel.:0": "weixin", 22 | "channel.:1": "ali", 23 | "refundInfo": [ 24 | { 25 | "refundId": "111111", 26 | "amount": 11.3 27 | } 28 | ], 29 | "address.mobile": "12345678911" 30 | }, 31 | "user.username": "snower", 32 | "user.nickname": "哈哈" 33 | } 34 | ] -------------------------------------------------------------------------------- /examples/get_value/get_value.sql: -------------------------------------------------------------------------------- 1 | 2 | insert into `orders` select * from `data/data.json`; 3 | 4 | select `order.orderId` as orderId, `user.username` as username, `order.payInfo.payId` as payId, `order.refundInfo.refundId` as refundId from `orders`; 5 | 6 | select get_value(`order`, 'orderId') as orderId, get_value(`user.username`) as username, get_value(`order`, 'payInfo.payId') as payId, get_value(`order`, 'refundInfo.refundId') as refundId from `orders`; 7 | 8 | select `order.address.mobile` as mobile1, get_value(`order`, 'address.mobile') as mobile2 from `orders`; 9 | 10 | select `order.payInfo.payTypes.:0` as payTypes1, get_value(`order`, 'payInfo.payTypes', 0) as payTypes2, get_value(`order`, 'payInfo.payTypes.:0') as payTypes3 from `orders`; 11 | 12 | select `order.payInfo.payTypes.:0:2` as payTypes1, get_value(`order`, 'payInfo.payTypes', (0, 2)) as payTypes2, get_value(`order`, 'payInfo.payTypes.:0:2') as payTypes3 from `orders`; 13 | 14 | select `order.payInfo.payTypes.:-1:-3:-1` as payTypes1, get_value(`order`, 'payInfo.payTypes', (-1, -3, -1)) as payTypes2, get_value(`order`, 'payInfo.payTypes.:-1:-3:-1') as payTypes3 from `orders`; 15 | 16 | select `order.channel.:0` as channel1, get_value(`order`, 'channel.:0') as channel2 from `orders`; -------------------------------------------------------------------------------- /examples/import_python/README.md: -------------------------------------------------------------------------------- 1 | # 自定义函数示例 2 | 3 | 可使用以下命令执行该Example: 4 | 5 | ```bash 6 | syncany-sql import_python.sql 7 | 8 | syncany-sql import_python2.sql 9 | ``` -------------------------------------------------------------------------------- /examples/import_python/config.yaml: 
-------------------------------------------------------------------------------- 1 | 2 | extensions: 3 | - syncany_ext 4 | 5 | imports: 6 | math: math 7 | 8 | executes: 9 | - init.sql -------------------------------------------------------------------------------- /examples/import_python/import_python.sql: -------------------------------------------------------------------------------- 1 | 2 | use `utils`; 3 | use `pendulum.parsing`; 4 | use `sys`; 5 | use `os`; 6 | use `datetime as python_datetime`; 7 | 8 | select utils$hello(); 9 | 10 | select utils$add_number(1, 2), utils$sum_array((1, 2, 3)); 11 | 12 | select parsing$parse('2023-02-10 10:33:22'); 13 | 14 | select sys$version(), os$getcwd(); 15 | 16 | select python_datetime$datetime$now(); -------------------------------------------------------------------------------- /examples/import_python/import_python2.sql: -------------------------------------------------------------------------------- 1 | 2 | use `utils`; 3 | use `pendulum.parsing`; 4 | use `sys`; 5 | use `os`; 6 | use `datetime as python_datetime`; 7 | 8 | select utils$hello(); 9 | 10 | select utils$add_number(1, 2), utils$sum_array((1, 2, 3)); 11 | 12 | select parsing$parse('2023-02-10 10:33:22'); 13 | 14 | select sys$version(), os$getcwd(); 15 | 16 | select python_datetime$datetime$now(); 17 | 18 | select ext_sum_func(1, 2); 19 | 20 | select math$pow(2, 3); 21 | 22 | select util_helpers_sum(1, 2); 23 | 24 | select uh$sum(1, 2) as v1, uh$load_time() as v2, uh$loadTime() as v3, uh$LoadTime() as v4, uh$LOAD_TIME() as v5; 25 | 26 | select uh$A$sum(uh$A(), 1, 2); -------------------------------------------------------------------------------- /examples/import_python/init.sql: -------------------------------------------------------------------------------- 1 | use `util_helpers as uh`; -------------------------------------------------------------------------------- /examples/import_python/syncany_ext.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/8 3 | # create by: snower 4 | 5 | from syncanysql import Calculater, register_calculater 6 | 7 | @register_calculater("ext_sum_func") 8 | class SumCalculater(Calculater): 9 | def calculate(self, a, b): 10 | return a + b 11 | -------------------------------------------------------------------------------- /examples/import_python/util_helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/8 3 | # create by: snower 4 | 5 | import time 6 | from syncanysql import Calculater, register_calculater 7 | 8 | @register_calculater("util_helpers_sum") 9 | class SumCalculater(Calculater): 10 | def calculate(self, a, b): 11 | return a + b 12 | 13 | def sum(a, b): 14 | return a + b 15 | 16 | LoadTime = time.time() 17 | 18 | class A(object): 19 | def sum(self, a, b): 20 | return a + b -------------------------------------------------------------------------------- /examples/import_python/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/3/6 3 | # create by: snower 4 | 5 | def hello(): 6 | return "hello world!" 
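# These helpers are exposed to SQL by the statement "use `utils`;" in
# import_python.sql and import_python2.sql, and are then invoked as
# utils$hello(), utils$add_number(1, 2) and utils$sum_array((1, 2, 3)).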
7 | 8 | def add_number(x, y): 9 | return x + y 10 | 11 | def sum_array(x): 12 | return sum(x) -------------------------------------------------------------------------------- /examples/insert_types/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/insert_types/README.md -------------------------------------------------------------------------------- /examples/insert_types/delete_insert.sql: -------------------------------------------------------------------------------- 1 | insert into `cdata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 2 | insert into `cdata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 3 | insert into `cdata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 4 | insert into `ndata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 5 | insert into `ndata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 6 | insert into `ndata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 7 | 8 | -- 以条件先删除数据再插入(第一个字段`id`为主键) 9 | insert into `cdata` select `id`, `name`, `create_time` from `ndata` where `id`>=2; 10 | select `id`, `name`, `create_time` from `cdata`; -------------------------------------------------------------------------------- /examples/insert_types/insert.sql: -------------------------------------------------------------------------------- 1 | insert into `cdata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 2 | insert into `cdata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 3 | insert into `cdata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 4 | insert into `ndata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 5 | insert into `ndata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 6 | insert into `ndata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 7 | 8 | -- 只插入(第一个字段`id`为主键) 9 | insert into `cdata` select `id`, `name`, `create_time` from `ndata` where `id`>=2; 10 | select `id`, `name`, `create_time` from `cdata`; 11 | 12 | -------------------------------------------------------------------------------- /examples/insert_types/update.sql: -------------------------------------------------------------------------------- 1 | insert into `cdata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 2 | insert into `cdata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 3 | insert into `cdata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 4 | insert into `ndata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 5 | insert into `ndata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 6 | insert into `ndata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 7 | 8 | -- 以主键IN查询加载数据后以主键`id`判断,存在则更新否则插入(第一个字段`id`为主键) 9 | insert into `cdata` select `id`, `name`, `create_time` from `ndata` where `id`>=2; 10 | select `id`, `name`, `create_time` from `cdata`; 11 | -------------------------------------------------------------------------------- /examples/insert_types/update_delete_insert.sql: -------------------------------------------------------------------------------- 1 
| insert into `cdata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 2 | insert into `cdata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 3 | insert into `cdata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 4 | insert into `ndata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 5 | insert into `ndata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 6 | insert into `ndata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 7 | 8 | -- 以条件先加载数据后以主键`id`判断,存在则更新否则插入,不在本次数据中则删除(第一个字段`id`为主键) 9 | insert into `cdata` select `id`, `name`, `create_time` from `ndata` where `id`>=2; 10 | select `id`, `name`, `create_time` from `cdata`; 11 | 12 | insert into `adata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 13 | insert into `adata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 14 | insert into `adata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 15 | insert into `bdata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 16 | insert into `bdata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 17 | insert into `bdata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 18 | 19 | set @use_output_type='UPDATE_DELETE_INSERT'; 20 | insert into `adata` select `id`, `name`, `create_time` from `bdata` where `id`>=2; 21 | select `id`, `name`, `create_time` from `adata`; -------------------------------------------------------------------------------- /examples/insert_types/update_insert.sql: -------------------------------------------------------------------------------- 1 | insert into `cdata` select 1 as `id`, '萝卜' as `name`, '2023-03-12 10:12:34' as `create_time`; 2 | insert into `cdata` select 2 as `id`, '土豆' as `name`, '2023-03-12 10:12:34' as `create_time`; 3 | insert into `cdata` select 4 as `id`, '花菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 4 | insert into `ndata` select 1 as `id`, '白菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 5 | insert into `ndata` select 2 as `id`, '青菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 6 | insert into `ndata` select 3 as `id`, '油麦菜' as `name`, '2023-03-12 10:12:34' as `create_time`; 7 | 8 | -- 以主键IN查询加载数据后以主键`id`判断,存在则更新否则插入(第一个字段`id`为主键) 9 | insert into `cdata` select `id`, `name`, `create_time` from `ndata` where `id`>=2; 10 | select `id`, `name`, `create_time` from `cdata`; 11 | -------------------------------------------------------------------------------- /examples/joins/data/devices.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "order_type": 1, 5 | "out_order_no": "2", 6 | "code": "123" 7 | }, 8 | { 9 | "uid": 2, 10 | "order_type": 1, 11 | "out_order_no": "1", 12 | "code": "124" 13 | }, 14 | { 15 | "uid": 2, 16 | "order_type": 1, 17 | "out_order_no": "1", 18 | "code": "125" 19 | }, 20 | { 21 | "uid": 1, 22 | "order_type": 2, 23 | "out_order_no": "10", 24 | "code": "126" 25 | }, 26 | { 27 | "uid": 2, 28 | "order_type": 2, 29 | "out_order_no": "11", 30 | "code": "127" 31 | } 32 | ] -------------------------------------------------------------------------------- /examples/joins/data/goodses.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "goods_id": 1, 5 | "goods_name": "青菜", 6 | "status": 
0 7 | }, 8 | { 9 | "uid": 1, 10 | "goods_id": 2, 11 | "goods_name": "白菜", 12 | "status": 0 13 | }, 14 | { 15 | "uid": 2, 16 | "goods_id": 3, 17 | "goods_name": "萝卜", 18 | "status": 0 19 | } 20 | ] -------------------------------------------------------------------------------- /examples/joins/data/order_historys.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 1, 4 | "order_id": 1, 5 | "history_type": 1, 6 | "uid": 2, 7 | "goods_id": 1, 8 | "amount": 9.6, 9 | "status": 0, 10 | "create_time": "2024-10-01 10:09:10" 11 | }, 12 | { 13 | "id": 2, 14 | "order_id": 1, 15 | "history_type": 0, 16 | "uid": 2, 17 | "goods_id": 1, 18 | "amount": 5.2, 19 | "status": 0, 20 | "create_time": "2024-10-02 14:09:10" 21 | }, 22 | { 23 | "id": 3, 24 | "order_id": 2, 25 | "history_type": 1, 26 | "uid": 1, 27 | "goods_id": 1, 28 | "amount": 7.6, 29 | "status": 0, 30 | "create_time": "2024-10-01 12:09:10" 31 | }, 32 | { 33 | "id": 4, 34 | "order_id": 1, 35 | "history_type": 1, 36 | "uid": 2, 37 | "goods_id": 1, 38 | "amount": 19.1, 39 | "status": 0, 40 | "create_time": "2024-10-06 11:09:10" 41 | }, 42 | { 43 | "id": 5, 44 | "order_id": 3, 45 | "history_type": 1, 46 | "uid": 2, 47 | "goods_id": 2, 48 | "amount": 3, 49 | "status": 0, 50 | "create_time": "2024-10-02 09:09:10" 51 | }, 52 | { 53 | "id": 6, 54 | "order_id": 3, 55 | "history_type": 0, 56 | "uid": 2, 57 | "goods_id": 2, 58 | "amount": 3.3, 59 | "status": 0, 60 | "create_time": "2024-10-04 09:09:10" 61 | }, 62 | { 63 | "id": 7, 64 | "order_id": 3, 65 | "history_type": 1, 66 | "uid": 2, 67 | "goods_id": 2, 68 | "amount": 4.3, 69 | "status": 0, 70 | "create_time": "2024-10-08 09:09:10" 71 | }, 72 | { 73 | "id": 8, 74 | "order_id": 4, 75 | "history_type": 1, 76 | "uid": 1, 77 | "goods_id": 1, 78 | "amount": 8, 79 | "status": 0, 80 | "create_time": "2024-10-01 10:09:10" 81 | }, 82 | { 83 | "id": 9, 84 | "order_id": 3, 85 | "history_type": 0, 86 | "uid": 2, 87 | "goods_id": 7.12, 88 | "amount": 3, 89 | "status": 0, 90 | "create_time": "2024-10-11 09:09:10" 91 | }, 92 | { 93 | "id": 10, 94 | "order_id": 3, 95 | "history_type": 0, 96 | "uid": 2, 97 | "goods_id": 2, 98 | "amount": 31.1, 99 | "status": 0, 100 | "create_time": "2024-10-14 19:09:10" 101 | }, 102 | { 103 | "id": 11, 104 | "order_id": 5, 105 | "history_type": 1, 106 | "uid": 2, 107 | "goods_id": 1, 108 | "amount": 8, 109 | "status": 0, 110 | "create_time": "2024-10-01 10:09:10" 111 | }, 112 | { 113 | "id": 12, 114 | "order_id": 6, 115 | "history_type": 1, 116 | "uid": 2, 117 | "goods_id": 2, 118 | "amount": 7, 119 | "status": 0, 120 | "create_time": "2024-10-01 10:09:10" 121 | }, 122 | { 123 | "id": 13, 124 | "order_id": 5, 125 | "history_type": 0, 126 | "uid": 2, 127 | "goods_id": 1, 128 | "amount": 18.9, 129 | "status": 0, 130 | "create_time": "2024-10-04 12:09:10" 131 | }, 132 | { 133 | "id": 14, 134 | "order_id": 5, 135 | "history_type": 1, 136 | "uid": 2, 137 | "goods_id": 1, 138 | "amount": 81.9, 139 | "status": 0, 140 | "create_time": "2024-10-06 15:09:10" 141 | } 142 | ] -------------------------------------------------------------------------------- /examples/joins/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 2, 5 | "goods_id": 1, 6 | "amount": 9.6, 7 | "status": 0 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 1, 12 | "goods_id": 1, 13 | "amount": 7.6, 14 | "status": 0 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 2, 19 | "goods_id": 2, 20 
| "amount": 3, 21 | "status": 0 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 1, 26 | "goods_id": 1, 27 | "amount": 8, 28 | "status": 0 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "goods_id": 1, 34 | "amount": 8, 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "uid": 2, 40 | "goods_id": 2, 41 | "amount": 7, 42 | "status": 0 43 | } 44 | ] -------------------------------------------------------------------------------- /examples/joins/data/services.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "ServiceA", 4 | "device_code": "123" 5 | }, 6 | { 7 | "name": "ServiceA", 8 | "device_code": "124" 9 | }, 10 | { 11 | "name": "ServiceA", 12 | "device_code": "125" 13 | }, 14 | { 15 | "name": "ServiceB", 16 | "device_code": "123" 17 | } 18 | ] -------------------------------------------------------------------------------- /examples/joins/data/users.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "name": "王五", 5 | "gender": "男", 6 | "status": 0 7 | }, 8 | { 9 | "uid": 2, 10 | "name": "李四", 11 | "gender": "女", 12 | "status": 0 13 | } 14 | ] -------------------------------------------------------------------------------- /examples/joins/inner_join.sql: -------------------------------------------------------------------------------- 1 | 2 | select a.goods_name, b.order_id, c.name from `data/goodses.json` a 3 | left join `data/orders.json` b on b.goods_id=a.goods_id and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 4 | left join `data/users.json` c on c.uid=b.uid and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 5 | where a.status=0; 6 | 7 | select a.goods_name, b.order_id, c.name from `data/goodses.json` a 8 | inner join `data/orders.json` b on b.goods_id=a.goods_id and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 9 | left join `data/users.json` c on c.uid=b.uid and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 10 | where a.status=0; 11 | 12 | select a.goods_name, max(b.order_id) as latest_order_id, c.name, sum(if(b.order_id is not null, 1, 0)), count(distinct c.uid) as user_cnt, sum(b.amount) as total_amount from `data/goodses.json` a 13 | left join `data/orders.json` b on b.goods_id=a.goods_id and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 14 | left join `data/users.json` c on c.uid=b.uid and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 15 | where a.status=0 16 | group by a.goods_name; 17 | 18 | select a.goods_name, max(b.order_id) as latest_order_id, c.name, sum(if(b.order_id is not null, 1, 0)) from `data/goodses.json` a 19 | inner join `data/orders.json` b on b.goods_id=a.goods_id and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 20 | left join `data/users.json` c on c.uid=b.uid and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 21 | where a.status=0 22 | group by c.name; 23 | 24 | select a.goods_name, b.order_id, c.name from `data/goodses.json` a, `data/orders.json` b, `data/users.json` c 25 | where a.status=0 and b.goods_id=a.goods_id and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 26 | and c.uid=b.uid and c.status=0 and c.uid in (select uid from `data/users.json` where status=0); 27 | 28 | select a.goods_name, b.order_id, c.name from `data/goodses.json` a, `data/orders.json` b, 
`data/users.json` c 29 | where a.status=0 and b.goods_id=a.goods_id and c.uid=b.uid; -------------------------------------------------------------------------------- /examples/joins/left_join.sql: -------------------------------------------------------------------------------- 1 | 2 | select a.order_id, b.name, c.goods_name from `data/orders.json` a 3 | join `data/users.json` b on a.uid=b.uid and b.status=0 and b.gender in ('男', '女') 4 | join `data/goodses.json` c on a.goods_id=c.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 5 | where a.status=0; 6 | 7 | select a.order_id, b.name, c.goods_name from `data/orders.json` a 8 | left join `data/users.json` b on a.uid=b.uid and b.status=0 and convert_string(b.gender) in ('男', '女') 9 | left join `data/goodses.json` c on convert_string(a.goods_id)=convert_string(c.goods_id) and convert_int(c.status)=0 and c.uid in (select uid from `data/users.json` where status=0) 10 | where a.status=0 and b.status=0 and c.status=0 and convert_string(b.gender) in ('男', '女') and c.uid in (select uid from `data/users.json` where status=0); 11 | 12 | select a.order_id, b.name, c.goods_name, count(*) as cnt from `data/orders.json` a 13 | join `data/users.json` b on a.uid=b.uid and b.status=0 and b.gender in ('男', '女') 14 | join `data/goodses.json` c on a.goods_id=c.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 15 | where a.status=0 16 | group by b.name; 17 | 18 | select a.order_id, b.name, c.goods_name, count(distinct b.uid) as ucnt, sum(a.amount) from `data/orders.json` a 19 | left join `data/users.json` b on a.uid=b.uid and b.status=0 and convert_string(b.gender) in ('男', '女') 20 | left join `data/goodses.json` c on convert_string(a.goods_id)=convert_string(c.goods_id) and convert_int(c.status)=0 and c.uid in (select uid from `data/users.json` where status=0) 21 | where a.status=0 and b.status=0 and c.status=0 and convert_string(b.gender) in ('男', '女') and c.uid in (select uid from `data/users.json` where status=0) 22 | group by c.goods_name; 23 | 24 | select distinct b.name, c.goods_name from `data/orders.json` a 25 | join `data/users.json` b on a.uid=b.uid and b.status=0 and b.gender in ('男', '女') 26 | left join `data/goodses.json` c on a.goods_id=c.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 27 | where a.status=0; 28 | 29 | select a.code, c.name, d.goods_name from `data/devices.json` a 30 | left join `data/orders.json` b on a.uid=b.uid and a.out_order_no=b.`order_id[int]` 31 | left join `data/users.json` c on a.uid=c.uid 32 | left join `data/goodses.json` d on b.goods_id=d.goods_id 33 | where d.goods_name is not null; 34 | 35 | select a.order_id, c.name from `data/orders.json` a 36 | left join `data/devices.json` b on a.order_id=b.`out_order_no[varchar]` and b.order_type=1 37 | left join `data/services.json` c on b.code=c.device_code 38 | where c.name is not null; 39 | 40 | select a.code, c.name, d.goods_name from `data/devices.json` a 41 | left join `data/orders.json` b on a.uid=b.uid and concat(a.out_order_no, 'HHH')=concat(b.`order_id[int]`, 'HHH') 42 | left join `data/users.json` c on a.uid=c.uid 43 | left join `data/goodses.json` d on b.goods_id=d.goods_id 44 | where d.goods_name is not null; 45 | 46 | select a.name, b.order_id, c.history_type, d.goods_name, count(*) as cnt, sum(c.amount) as total_amount from `data/users.json` a 47 | left join `data/orders.json` b on a.uid = b.uid 48 | left join `data/order_historys.json` c on 
b.order_id = c.order_id 49 | left join `data/goodses.json` d on b.goods_id = d.goods_id 50 | where a.uid>0 and b.order_id <= 10 group by b.order_id, c.history_type having cnt>1 order by order_id; 51 | 52 | select order_id, name, goods_name, history_type, count(*) as cnt, sum(amount) as total_amount from ( 53 | select a.name, b.order_id, c.history_type, c.amount, d.goods_name from `data/users.json` a 54 | left join `data/orders.json` b on a.uid = b.uid 55 | left join `data/order_historys.json` c on b.order_id = c.order_id 56 | left join `data/goodses.json` d on b.goods_id = d.goods_id 57 | ) aa where order_id <= 10 group by order_id, history_type having cnt>1 order by order_id; -------------------------------------------------------------------------------- /examples/joins/right_join.sql: -------------------------------------------------------------------------------- 1 | 2 | select b.order_id, a.name, c.goods_name from `data/users.json` a 3 | right join `data/orders.json` b on b.uid=a.uid and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 4 | left join `data/goodses.json` c on c.goods_id=b.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 5 | where a.status=0 and a.gender in ('男', '女'); 6 | 7 | select b.order_id, a.name, c.goods_name, count(*) as cnt from `data/users.json` a 8 | right join `data/orders.json` b on b.uid=a.uid and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 9 | left join `data/goodses.json` c on c.goods_id=b.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 10 | where a.status=0 and a.gender in ('男', '女') group by a.name; 11 | 12 | select b.order_id, a.name, c.goods_name, count(distinct a.uid) as cnt, sum(b.amount) from `data/users.json` a 13 | right join `data/orders.json` b on b.uid=a.uid and b.status=0 and b.uid in (select uid from `data/users.json` where status=0) 14 | left join `data/goodses.json` c on c.goods_id=b.goods_id and c.status=0 and c.uid in (select uid from `data/users.json` where status=0) 15 | where a.status=0 and a.gender in ('男', '女') group by c.goods_name; -------------------------------------------------------------------------------- /examples/json/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/json/README.md -------------------------------------------------------------------------------- /examples/json/json.sql: -------------------------------------------------------------------------------- 1 | 2 | SET @j = '{"a": 1, "b": 2, "c": {"d": 4}}'; 3 | SET @j2 = '1'; 4 | SELECT JSON_CONTAINS(@j, @j2, '$.a'); 5 | SELECT JSON_CONTAINS(@j, @j2, '$.b'); 6 | SET @j2 = '{"d": 4}'; 7 | SELECT JSON_CONTAINS(@j, @j2, '$.a'); 8 | SELECT JSON_CONTAINS(@j, @j2, '$.c'); 9 | 10 | SET @j = '{"a": 1, "b": 2, "c": {"d": 4}}'; 11 | SELECT JSON_CONTAINS_PATH(@j, 'one', '$.a', '$.e'); 12 | SELECT JSON_CONTAINS_PATH(@j, 'all', '$.a', '$.e'); 13 | SELECT JSON_CONTAINS_PATH(@j, 'one', '$.c.d'); 14 | SELECT JSON_CONTAINS_PATH(@j, 'one', '$.a.d'); 15 | 16 | SELECT JSON_EXTRACT('[10, 20, [30, 40]]', '$[1]'); 17 | SELECT JSON_EXTRACT('[10, 20, [30, 40]]', '$[2][*]'); 18 | SELECT JSON_EXTRACT('[10, 20, [{"a":30}, {"b":40}]]', '$[2][*]["a"]'); 19 | 20 | SELECT JSON_DEPTH('{}'), JSON_DEPTH('[]'), JSON_DEPTH('true'); 21 | SELECT JSON_DEPTH('[10, 20]'), JSON_DEPTH('[[], {}]'); 22 | SELECT JSON_DEPTH('[10, {"a": 
20}]'); 23 | 24 | SELECT JSON_KEYS('{"a": 1, "b": {"c": 30}}'); 25 | SELECT JSON_KEYS('{"a": 1, "b": {"c": 30}}', '$.b'); 26 | 27 | SELECT JSON_LENGTH('[1, 2, {"a": 3}]'); 28 | SELECT JSON_LENGTH('{"a": 1, "b": {"c": 30}}'); 29 | SELECT JSON_LENGTH('{"a": 1, "b": {"c": 30}}', '$.b'); 30 | 31 | SELECT JSON_VALID('{"a": 1}'); 32 | SELECT JSON_VALID('hello'), JSON_VALID('"hello"'); 33 | 34 | SET @j = '{"a": 1, "b": 2, "c": {"d": 4}}'; 35 | SELECT JSON_SET(@j, '$.a', 2), JSON_SET(@j, '$.c.d', 2); 36 | SELECT JSON_SET('"1"', '$[0]', 'a'), JSON_SET('"1"', '$[2]', 'a'); 37 | SELECT JSON_SET('["1"]', '$[0]', 'a'), JSON_SET('["1"]', '$[2]', 'a'); 38 | 39 | SELECT JSON_REMOVE(@j, '$.a', '$.c.d'), JSON_REMOVE(@j, '$.c.a'); 40 | SELECT JSON_REMOVE('"1"', '$[0]'), JSON_REMOVE('"1"', '$[2]'); 41 | SELECT JSON_REMOVE('["1"]', '$[0]'), JSON_REMOVE('["1"]', '$[2]'); -------------------------------------------------------------------------------- /examples/logic_operation/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 2, 5 | "goods_id": 1, 6 | "amount": 9.6, 7 | "status": 0 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 1, 12 | "goods_id": 1, 13 | "amount": 0.6, 14 | "status": 0 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 2, 19 | "goods_id": 2, 20 | "amount": 3, 21 | "status": 0 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 3, 26 | "goods_id": 1, 27 | "amount": 8, 28 | "status": 0 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "goods_id": 1, 34 | "amount": 8, 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "uid": 4, 40 | "goods_id": 2, 41 | "amount": 7, 42 | "status": 0 43 | } 44 | ] -------------------------------------------------------------------------------- /examples/logic_operation/logic_operation.sql: -------------------------------------------------------------------------------- 1 | 2 | select 1 < 2 as a, 1 <= '2' as b, '123' > '23' as c, true >= 1 as d, false != 1 as f; 3 | 4 | select 1 + 2 * 3 > 5 as a, '1' + 2 * '3' < 5 as b; 5 | 6 | select 1 = 1 as a, 1 = '1' as b, 1 = 'a' as c, 1 = '1a' as d; 7 | 8 | select not 1 < 2 as a, not 1 <= '2' as b, not '123' > '23' as c, not true >= 1 as d, not false <> 1 as f; 9 | 10 | select not 1 + 2 * 3 > 5 as a, not '1' + 2 * '3' < 5 as b, not true as c, not 1 as d; 11 | 12 | select 'abc' like '%' as a, 'abc' like '%%' as b, 'abc' like '%a%' as c, 'abc' like '%bc' as d, 'abc' like 'ab%' as e, 'abc' like 'ac%' as f; 13 | 14 | select 'abc' not like '%' as a, 'abc' not like '%%' as b, 'abc' not like '%a%' as c, 'abc' not like '%bc' as d, 'abc' not like 'ab%' as e, 'abc' not like 'ac%' as f; 15 | 16 | select 'a' in ('a', 'b', 'c') as a, 1 in ('a', 'b', 'c') as b, 1 in (1, 2, 3) as c, 'a' in (1, 2, 3) as d; 17 | 18 | select 'a' not in ('a', 'b', 'c') as a, 1 not in ('a', 'b', 'c') as b, 1 not in (1, 2, 3) as c, 'a' not in (1, 2, 3) as d; 19 | 20 | select if(amount > 0 and status=0, 1, 0) as a, case when amount <= 0 then 'A' when amount > 0 and amount < 1 then 'B' when amount between 1 and 10 then 'C' else 'D' end as b from `data/orders.json` where uid=1; 21 | 22 | select sum(amount) as a, if(sum(amount) > 0, 1, 0) as b, case when sum(amount) <= 0 then 'A' when sum(amount) > 0 and sum(amount) < 1 then 'B' when sum(amount) between 1 and 100 then 'C' else 'D' end as c from `data/orders.json`; 23 | 24 | select order_id, uid, goods_id, amount from `data/orders.json` where uid != 3 and uid != 4 and uid > 0 and uid < 10 order by order_id desc limit 2, 1; 25 | 26 | select
* from `data/orders.json` where uid != 3 and uid != 4 and uid > 0 and uid < 10 and status=0 order by order_id desc limit 1 offset 2; 27 | 28 | select '1abcd.*b2' like '%ab.*%' as a, REGEXP_LIKE('fo\nfo', '^fo$', 'm') as b, regexp_instr('dog cat dog', 'dog', 1, 1, 1) as c, regexp_replace('a b c', 'b', 'X') as d, regexp_substr('abc def ghi', '[a-z]+') as e; 29 | 30 | select '1aa2' like '%aa%' as a, regexp_like('1AA2', '.*aa.*', 'c') as b, regexp_instr('a bb c d BB e', 'bb', 1, 2, 1) as c, regexp_replace('abc def ghi', '[a-z]+', 'X', 1, 2) as d, regexp_substr('abc def ghi', '[a-z]+', 1, 2) as e; -------------------------------------------------------------------------------- /examples/loop/loop.sql: -------------------------------------------------------------------------------- 1 | 2 | WITH RECURSIVE table_0 AS ( 3 | SELECT 1 AS n 4 | ), 5 | table_1 AS ( 6 | SELECT n 7 | FROM table_0 8 | UNION ALL 9 | SELECT n + 1 10 | FROM table_1 11 | WHERE n < 4 12 | ) 13 | INSERT INTO table_2 SELECT n FROM table_1 table_2; 14 | 15 | select n from table_2; -------------------------------------------------------------------------------- /examples/mathematical/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/mathematical/README.md -------------------------------------------------------------------------------- /examples/mathematical/mathematical.sql: -------------------------------------------------------------------------------- 1 | 2 | select 2 + 1 as a, 2 - 1 as b, 2 * 1 as c, 2 / 1 as d; 3 | 4 | select 2 & 1 as a, 2 | 1 as b, 2 ^ 1 as c, ~2 as d; 5 | 6 | select '2' + 1 as a, '2022-10-11' / 1 as b, '111abc' * 2 as c; 7 | 8 | select null + 1 as a, true + 1 as b, false + 1 as c; 9 | 10 | select datetime('2023-04-02 10:08:06') / 2 as a, date('2023-04-02 10:08:06') / 2 as b, time('2023-04-02 10:08:06') / 2 as c; 11 | 12 | select add((1, 2, 3), 1) as a, mul((1, 2, 3), 4) as b; 13 | 14 | select 1 + 2 * 3 - 2 as a, (1 + 2) * (3 - 2) as b; -------------------------------------------------------------------------------- /examples/memory_temporary_storage/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/memory_temporary_storage/README.md -------------------------------------------------------------------------------- /examples/memory_temporary_storage/data/user.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "Email": "bm6U11zDIspdNW1iQiVZdHX8uqOWZe0cers9BZEcCrE=", 4 | "FirstName": "John", 5 | "Id": "aa7e941b-d399-4bec-0ba4-08d8dd2f9239", 6 | "LastName": "Doe" 7 | } 8 | ] -------------------------------------------------------------------------------- /examples/memory_temporary_storage/memory_temporary_storage.sql: -------------------------------------------------------------------------------- 1 | 2 | insert into temporary_a select Id, Email, FirstName, LastName from `data/user.json`; 3 | 4 | insert into temporary_b (Id, Name) VALUES ('aa7e941b-d399-4bec-0ba4-08d8dd2f9239', 'LiMei'); 5 | 6 | select * from temporary_a; 7 | 8 | select * from temporary_b; 9 | 10 | select a.Id, a.Email, b.Name, a.FirstName, a.LastName from temporary_a a join temporary_b b on a.Id=b.Id; -------------------------------------------------------------------------------- 
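A follow-on sketch (not a repository file): the temporary tables above persist for the rest of the run, so they can still be queried and aggregated after the join, for example:

select FirstName, count(*) as cnt from temporary_a group by FirstName;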
/examples/nginx-log/README.md: -------------------------------------------------------------------------------- 1 | # 运行 2 | 3 | ```bash 4 | syncany-sql ip-top-3.sql 5 | ``` -------------------------------------------------------------------------------- /examples/nginx-log/ip-top-3.sql: -------------------------------------------------------------------------------- 1 | #!/bin/env syncany-sql 2 | 3 | SELECT seg0 AS ip, COUNT(*) AS cnt FROM `file://data/access.log?sep= ` GROUP BY seg0 ORDER BY cnt DESC LIMIT 3; -------------------------------------------------------------------------------- /examples/parameter_variable/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/parameter_variable/README.md -------------------------------------------------------------------------------- /examples/parameter_variable/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 1, 5 | "amount": 10, 6 | "order_at": "2023-01-12 10:11:12", 7 | "finish_at": "2023-04-12 15:11:12" 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 2, 12 | "amount": 5, 13 | "order_at": "2023-03-05 00:11:12", 14 | "finish_at": "2023-05-05 19:11:12" 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 1, 19 | "amount": 45, 20 | "order_at": "2023-04-12 15:11:13", 21 | "finish_at": "2023-07-12 22:11:12" 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 1, 26 | "amount": 322, 27 | "order_at": "2023-07-23 12:11:12", 28 | "finish_at": "2023-10-12 15:11:12" 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "amount": 3445, 34 | "order_at": "2023-05-05 19:11:13", 35 | "finish_at": "2023-08-12 15:11:12" 36 | } 37 | ] -------------------------------------------------------------------------------- /examples/parameter_variable/parameter_assign.sql: -------------------------------------------------------------------------------- 1 | 2 | set @a = 1; 3 | 4 | select @a as a, @a := @a + 1 as b, sum(@a := @a + 1) as c, if(@a < 2, @a, @a := @a + 1) as d, @a as e from `data/orders.json` group by order_id; 5 | 6 | select row_index() as a, row_last(order_id) as b, order_id as c, @a := @a + 1 as d from `data/orders.json`; 7 | 8 | select row_index() as a, row_last(order_id) as b, order_id as c, @a := @a + 1 as d from `data/orders.json`; 9 | 10 | -- 计算用户连续订单信息 11 | 12 | set @sindex = 1; 13 | 14 | select uid, count(*) as cnt, sum(amount) as amount, min(order_at) as start_at, max(finish_at) as end_at from ( 15 | select *, if(uid = last_uid and TIMESTAMPDIFF('SECOND', last_finish_at, order_at) <= 1, @sindex, @sindex := @sindex + 1) as sindex from ( 16 | select *, row_last(uid) as last_uid, row_last(finish_at) as last_finish_at from ( 17 | select order_id, uid, amount, order_at, finish_at from `data/orders.json` order by uid, order_at, finish_at 18 | ) aaa 19 | ) aa 20 | ) a group by uid, sindex; -------------------------------------------------------------------------------- /examples/parameter_variable/parameter_variable.sql: -------------------------------------------------------------------------------- 1 | 2 | set @aaa=1; 3 | set @bbb=@aaa + 1; 4 | 5 | -- 获取环境变量或命令行参数 6 | set @ccc='${PATH:}'; 7 | 8 | select @aaa as a, @bbb as b, @ccc as c; 9 | select @aaa + @bbb as a, math$pow(@bbb, 2) as b, length(@ccc) as c; 10 | 11 | select 2 as a into @aaa; 12 | select a into @bbb from ( 13 | select yield_array(json$decode('[1, 3]')) as a 14 | ) t; 15 | 
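-- Note (added commentary, not in the original file): a single-row,
-- single-column result stores a scalar in the variable, while the multi-row
-- and multi-column selects below appear to collect their values into the
-- variable, as the final select of @aaa, @bbb, @ccc and @ddd shows.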
select a, b into @ccc from ( 16 | select 1 as a, 'abc' as b 17 | ) t; 18 | select a, b into @ddd from ( 19 | select yield_array(json$decode('[1, 3]')) as a, yield_array(json$decode('["abc", "efg"]')) as b 20 | ) t; 21 | 22 | select @aaa as a, @bbb as b, @ccc as c, @ddd as d; -------------------------------------------------------------------------------- /examples/pyeval/README.md: -------------------------------------------------------------------------------- 1 | # pyeval 直接执行python代码 2 | 3 | ## pyeval(pycode, [args1, [args2, ...]]) 4 | 5 | ## 使用方式 6 | 7 | - 参数通过 args获取,如第一个参数 args[0], 第二个参数 args[1],依次类推 8 | - 支持内建模块 datetime、time、math、random、string、uuid、base64、hashlib、pickle、json、re -------------------------------------------------------------------------------- /examples/pyeval/pyeval.sql: -------------------------------------------------------------------------------- 1 | 2 | select pyeval('1 + 2') as a, pyeval('",".join([str(i) for i in range(4)])') as b, pyeval('args[0] + args[1]', 1, 2) as c, pyeval('json.loads(args[0])', '"123"') as d; -------------------------------------------------------------------------------- /examples/raw_query/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/raw_query/README.md -------------------------------------------------------------------------------- /examples/raw_query/raw_query.sql: -------------------------------------------------------------------------------- 1 | 2 | -- SQL型RAW SELECT查询,raw和endraw之间的SQL会以原始SQL提交到数据库执行 3 | -- raw括号中第一个参数指定配置的数据库名,第二个参数为该RAW SQL取一个名称 4 | -- 注意RAW SQL直接提交到数据库执行,其中数据库名和表名称都应直接和数据库一致 5 | select * from /* raw(mysql_cdtx.test2) */ ( 6 | select a.customer_id, b.mobile, b.nickname, c.birthday, c.name, c.gender 7 | from cdtx.t_customer a join cdtx.t_user b on a.uid=b.uid and b.is_deleted=0 8 | join cdtx.t_user_profile c on b.uid=c.uid and c.is_deleted=0 9 | where a.is_deleted=0 and a.workshop_id in (77, 78) 10 | ) /* endraw */ b; 11 | 12 | -- 非SQL型数据库编写RAW QUERY可省略“*/ (”和“) /*” 13 | -- 如mongo的RAW QUERY即为aggregate查询语句 14 | select msg_text from /* raw(mongo_instant_messaging.t_im_group_messages) 15 | [] 16 | endraw */ a where msg_type='text' order by a.msg_time desc; -------------------------------------------------------------------------------- /examples/strings/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/strings/README.md -------------------------------------------------------------------------------- /examples/strings/strings.sql: -------------------------------------------------------------------------------- 1 | 2 | select concat('a', 'b', 'c') as a, substring('abc', 1, 2) as b, lower('AbC') as c, upper('AbC') as d; 3 | 4 | select trim(' a b c ') as a, repeat('a', 3) as b, reverse('abc') as c, strcmp('abc', 'bc') as d; 5 | 6 | select startswith('abc', 'ab') as a, endswith('abc', 'bc') as b, contains('abc', 'bc') as c; 7 | 8 | select concat('a', null) as a, concat('a ', datetime('2023-04-02 10:08:06'), ' ', date('2023-04-02 10:08:06'), ' ', time('2023-04-02 10:08:06'), ' ', true, ' ', false, ' ', 1, ' ', 1.23) as b; -------------------------------------------------------------------------------- /examples/subquery/data/goodses.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 
| "goods_id": 1, 4 | "goods_name": "青菜" 5 | }, 6 | { 7 | "goods_id": 2, 8 | "goods_name": "白菜" 9 | }, 10 | { 11 | "goods_id": 3, 12 | "goods_name": "萝卜" 13 | } 14 | ] -------------------------------------------------------------------------------- /examples/subquery/data/order_historys.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 1, 4 | "order_id": 1, 5 | "history_type": 1, 6 | "uid": 2, 7 | "goods_id": 1, 8 | "amount": 9.6, 9 | "status": 0, 10 | "create_time": "2024-10-01 10:09:10" 11 | }, 12 | { 13 | "id": 2, 14 | "order_id": 1, 15 | "history_type": 0, 16 | "uid": 2, 17 | "goods_id": 1, 18 | "amount": 5.2, 19 | "status": 0, 20 | "create_time": "2024-10-02 14:09:10" 21 | }, 22 | { 23 | "id": 3, 24 | "order_id": 2, 25 | "history_type": 1, 26 | "uid": 1, 27 | "goods_id": 1, 28 | "amount": 7.6, 29 | "status": 0, 30 | "create_time": "2024-10-01 12:09:10" 31 | }, 32 | { 33 | "id": 4, 34 | "order_id": 1, 35 | "history_type": 1, 36 | "uid": 2, 37 | "goods_id": 1, 38 | "amount": 19.1, 39 | "status": 0, 40 | "create_time": "2024-10-06 11:09:10" 41 | }, 42 | { 43 | "id": 5, 44 | "order_id": 3, 45 | "history_type": 1, 46 | "uid": 2, 47 | "goods_id": 2, 48 | "amount": 3, 49 | "status": 0, 50 | "create_time": "2024-10-02 09:09:10" 51 | }, 52 | { 53 | "id": 6, 54 | "order_id": 3, 55 | "history_type": 0, 56 | "uid": 2, 57 | "goods_id": 2, 58 | "amount": 3.3, 59 | "status": 0, 60 | "create_time": "2024-10-04 09:09:10" 61 | }, 62 | { 63 | "id": 7, 64 | "order_id": 3, 65 | "history_type": 1, 66 | "uid": 2, 67 | "goods_id": 2, 68 | "amount": 4.3, 69 | "status": 0, 70 | "create_time": "2024-10-08 09:09:10" 71 | }, 72 | { 73 | "id": 8, 74 | "order_id": 4, 75 | "history_type": 1, 76 | "uid": 1, 77 | "goods_id": 1, 78 | "amount": 8, 79 | "status": 0, 80 | "create_time": "2024-10-01 10:09:10" 81 | }, 82 | { 83 | "id": 9, 84 | "order_id": 3, 85 | "history_type": 0, 86 | "uid": 2, 87 | "goods_id": 7.12, 88 | "amount": 3, 89 | "status": 0, 90 | "create_time": "2024-10-11 09:09:10" 91 | }, 92 | { 93 | "id": 10, 94 | "order_id": 3, 95 | "history_type": 0, 96 | "uid": 2, 97 | "goods_id": 2, 98 | "amount": 31.1, 99 | "status": 0, 100 | "create_time": "2024-10-14 19:09:10" 101 | }, 102 | { 103 | "id": 11, 104 | "order_id": 5, 105 | "history_type": 1, 106 | "uid": 2, 107 | "goods_id": 1, 108 | "amount": 8, 109 | "status": 0, 110 | "create_time": "2024-10-01 10:09:10" 111 | }, 112 | { 113 | "id": 12, 114 | "order_id": 6, 115 | "history_type": 1, 116 | "uid": 2, 117 | "goods_id": 2, 118 | "amount": 7, 119 | "status": 0, 120 | "create_time": "2024-10-01 10:09:10" 121 | }, 122 | { 123 | "id": 13, 124 | "order_id": 5, 125 | "history_type": 0, 126 | "uid": 2, 127 | "goods_id": 1, 128 | "amount": 18.9, 129 | "status": 0, 130 | "create_time": "2024-10-04 12:09:10" 131 | }, 132 | { 133 | "id": 14, 134 | "order_id": 5, 135 | "history_type": 1, 136 | "uid": 2, 137 | "goods_id": 1, 138 | "amount": 81.9, 139 | "status": 0, 140 | "create_time": "2024-10-06 15:09:10" 141 | } 142 | ] -------------------------------------------------------------------------------- /examples/subquery/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 2, 5 | "goods_id": 1, 6 | "amount": 9.6, 7 | "status": 0 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 1, 12 | "goods_id": 1, 13 | "amount": 7.6, 14 | "status": 0 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 2, 19 | "goods_id": 2, 20 | "amount": 3, 21 | 
"status": 0 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 1, 26 | "goods_id": 1, 27 | "amount": 8, 28 | "status": 0 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "goods_id": 1, 34 | "amount": 8, 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "uid": 2, 40 | "goods_id": 2, 41 | "amount": 7, 42 | "status": 0 43 | } 44 | ] -------------------------------------------------------------------------------- /examples/subquery/data/users.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "name": "王五" 5 | }, 6 | { 7 | "uid": 2, 8 | "name": "李四" 9 | } 10 | ] -------------------------------------------------------------------------------- /examples/subquery/subquery.sql: -------------------------------------------------------------------------------- 1 | select uid, latest_order_id, cnt, total_amount from ( 2 | select uid, max(order_id) as latest_order_id, count(*) as cnt, sum(amount) as total_amount from `data/orders.json` group by uid 3 | ) a where total_amount>20; 4 | 5 | select order_id, uid, amount, (select count(*) from `data/order_historys.json` c where c.order_id=a.order_id and status=0) as history_count, 6 | exists(select count(*) from `data/order_historys.json` c where c.order_id=a.order_id and status=0) as history_exists 7 | from `data/orders.json` as a where (select sum(amount) from `data/orders.json` as b where a.order_id=b.order_id and status=0)<5; 8 | 9 | select goods_id, goods_name, (select count(*) from `data/orders.json` c where c.goods_id=a.goods_id and status=0) as order_count, 10 | exists(select count(*) from `data/orders.json` c where c.goods_id=a.goods_id and status=0) as order_exists 11 | from `data/goodses.json` as a; 12 | 13 | select order_id, uid, amount, (select count(*) from `data/order_historys.json` c where c.order_id+1=a.order_id+1 and status=0) as history_count, 14 | exists(select count(*) from `data/order_historys.json` c where c.order_id=a.order_id and status=0) as history_exists 15 | from `data/orders.json` as a where (select sum(amount) from `data/orders.json` as b where a.order_id=(b.order_id+1)-1 and status=0)<5; 16 | 17 | select goods_id, goods_name, (select count(*) from `data/orders.json` c where c.goods_id+1=a.goods_id+1 and status=0) as order_count, 18 | exists(select count(*) from `data/orders.json` c where c.goods_id=(a.goods_id+1)-1 and status=0) as order_exists 19 | from `data/goodses.json` as a; 20 | 21 | select order_id, count(*) as cnt, sum(amount) as total_amount from `data/orders.json` where order_id in (select (1, 2, 3) as order_id) group by order_id; 22 | 23 | select uid, ( 24 | select sum(amount) as total_amount from `data/orders.json` as b where a.uid=b.uid group by b.uid 25 | ) as total_amount from `data/users.json` a where uid in (select uid from `data/orders.json`) 26 | and uid in (select uid from `data/order_historys.json` group by uid); 27 | 28 | select order_id, uid, amount, (select count(*) from `data/order_historys.json` c where c.order_id+1=a.order_id+1 and status=0)>0 as has_history, 29 | exists(select count(*) from `data/order_historys.json` c where c.order_id=a.order_id and status=0) as history_exists 30 | from `data/orders.json` as a where ( 31 | select sum(amount) from `data/orders.json` as b where a.order_id=(b.order_id+1)-1 and status=0 and exists( 32 | select uid from `data/users.json` d where b.uid=d.uid 33 | ) 34 | )<5; 35 | 36 | select order_id, uid, amount, (select count(*) from `data/order_historys.json` where order_id+1=a.order_id+1 and 
status=0)>0 as has_history, 37 | exists(select count(*) from `data/order_historys.json` where order_id=a.order_id and status=0) as history_exists 38 | from `data/orders.json` as a where ( 39 | select sum(amount) from `data/orders.json` as b where a.order_id=(b.order_id+1)-1 and status=0 and exists( 40 | select uid from `data/users.json` where b.uid=uid 41 | ) 42 | )<5; 43 | 44 | select order_id, uid, amount, (select count(*) from `data/order_historys.json` where order_id+1=a.order_id+1 and status=0)>0 as has_history, 45 | exists(select count(*) from `data/order_historys.json` where order_id=convert_int(a.order_id) and status=0) as history_exists 46 | from `data/orders.json` as a where ( 47 | select sum(amount) from `data/orders.json` as b where convert_int(a.order_id)=convert_int((b.order_id+1)-1) and status=0 and exists( 48 | select uid from `data/users.json` where b.uid=uid 49 | ) 50 | )<5; -------------------------------------------------------------------------------- /examples/time_window/README.md: -------------------------------------------------------------------------------- 1 | # Aligning to time points -------------------------------------------------------------------------------- /examples/time_window/time_window.sql: -------------------------------------------------------------------------------- 1 | 2 | select time_window('1S') as a, time_window('15S') as b, time_window('1M') as c, time_window('15M') as d, time_window('2H') as e, time_window('1d') as f; 3 | 4 | select time_window('1S', datetime('2023-07-03 12:24:27')) as a, time_window('15S', datetime('2023-07-03 12:24:27')) as b, time_window('1M', datetime('2023-07-03 12:24:27')) as c, 5 | time_window('15M', datetime('2023-07-03 12:24:27')) as d, time_window('2H', datetime('2023-07-03 12:24:27')) as e, time_window('1d', datetime('2023-07-03 12:24:27')) as f; 6 | 7 | select time_window('1S', datetime('2023-07-03 12:24:27'), 3) as a, time_window('15S', datetime('2023-07-03 12:24:27'), 3) as b, time_window('1M', datetime('2023-07-03 12:24:27'), 3) as c, 8 | time_window('15M', datetime('2023-07-03 12:24:27'), 3) as d, time_window('2H', datetime('2023-07-03 12:24:27'), 3) as e, time_window('1d', datetime('2023-07-03 12:24:27'), 3) as f; 9 | 10 | select time_window('15M', create_time), count(*) from `data/order.csv` group by time_window('15M', create_time) limit 10; -------------------------------------------------------------------------------- /examples/transform/README.md: -------------------------------------------------------------------------------- 1 | 2 | # transform: applying transformations to query results 3 | 4 | ### SQL syntax rules 5 | 6 | - the FROM clause is a subquery without an alias 7 | - there is no where, group, having or limit clause 8 | - the select list queries exactly one value, that value is a function call, and no alias is set on it 9 | 10 | A SQL statement that satisfies all of the above conditions is compiled into a transform 11 | 12 | ### transform function definition 13 | 14 | - the first argument is the array of subquery result rows 15 | - the remaining arguments are constant parameters 16 | - the return value is the result array 17 | 18 | When calling it, the first (subquery result array) argument is not written; it is passed in automatically at execution time --------------------------------------------------------------------------------
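A sketch applying the rules above (an illustration, not a repository file; the statement is borrowed from transform_v2h.sql further below): the query has a bare transform call as its only select value over an unaliased subquery and no where/group/having/limit clause, so it is compiled into a transform. Putting an alias on the function call, or adding any of those clauses, would make it an ordinary query again.

select transform$v2h('name', 'create_date', 'amount') from (
    select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id
);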
"order_id": 5, 32 | "site_id": 8, 33 | "amount": 3.5, 34 | "create_date": "2023-01-05", 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "site_id": 34, 40 | "amount": 11.2, 41 | "create_date": "2023-01-07", 42 | "status": 0 43 | }, 44 | { 45 | "order_id": 7, 46 | "site_id": 8, 47 | "amount": 3.54, 48 | "create_date": "2023-01-08", 49 | "status": 0 50 | } 51 | ] -------------------------------------------------------------------------------- /examples/transform/data/sites.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "site_id": 8, 4 | "name": "黄豆网", 5 | "status": 0 6 | }, 7 | { 8 | "site_id": 15, 9 | "name": "青菜网", 10 | "status": 0 11 | }, 12 | { 13 | "site_id": 21, 14 | "name": "去啥网", 15 | "status": 0 16 | }, 17 | { 18 | "site_id": 26, 19 | "name": "汽车网", 20 | "status": 0 21 | }, 22 | { 23 | "site_id": 28, 24 | "name": "火箭网", 25 | "status": 0 26 | }, 27 | { 28 | "site_id": 34, 29 | "name": "卫星网", 30 | "status": 0 31 | } 32 | ] -------------------------------------------------------------------------------- /examples/transform/transform_customize.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/25 3 | # create by: snower 4 | 5 | from syncany.calculaters import register_calculater, TransformCalculater 6 | 7 | 8 | @register_calculater("transform_row_id") 9 | class RowIdTransformCalculater(TransformCalculater): 10 | def calculate(self, datas): 11 | if not datas: 12 | return datas 13 | row_id_index, keys = 1, (["row_id"] + list(datas[0].keys())) 14 | for data in datas: 15 | data["row_id"] = row_id_index 16 | row_id_index += 1 17 | self.update_outputer_schema(keys) 18 | return datas -------------------------------------------------------------------------------- /examples/transform/transform_customize.sql: -------------------------------------------------------------------------------- 1 | 2 | use `transform_customize`; 3 | 4 | select transform_row_id() from ( 5 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id group by a.site_id 6 | ); -------------------------------------------------------------------------------- /examples/transform/transform_h2v.sql: -------------------------------------------------------------------------------- 1 | 2 | insert into v2h_data select transform$v2h('name', 'create_date', 'amount') from ( 3 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 4 | ); 5 | 6 | /* 7 | 横向表转为纵向表 8 | 参数1:横向表头 9 | 参数2:纵向统计值 10 | 参数3:值,不传递不保留值 11 | 12 | 如以下表格: 13 | -------------------------------- 14 | | order_date | limei | wanzhi | 15 | -------------------------------- 16 | | 2022-01-01 | 5.5 | 8.2 | 17 | | 2022-01-02 | 4.3 | 1.8 | 18 | -------------------------------- 19 | 20 | 经过transform$v2h('name', 'order_date', 'amount')后变为: 21 | 22 | -------------------------------- 23 | | name | order_date | amount | 24 | -------------------------------- 25 | | limei | 2022-01-01 | 5.5 | 26 | | wanzhi | 2022-01-01 | 8.2 | 27 | | limei | 2022-01-02 | 4.3 | 28 | | wanzhi | 2022-01-02 | 1.8 | 29 | -------------------------------- 30 | */ 31 | 32 | select transform$h2v('name', 'create_date', 'amount') from ( 33 | select `create_date`, `黄豆网`, `青菜网`, `火箭网`, `卫星网` from v2h_data 34 | ); 35 | 36 | insert into test_data (order_date, limei, wanzhi) values ('2022-01-01', 5.5, 8.2), ('2022-01-02', 4.3, 
-------------------------------------------------------------------------------- /examples/transform/transform_h4v.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Convert KEY-VALUE rows into a horizontal (wide) table. 4 | 5 | Given the following table: 6 | ------------------------------ 7 | | key      | name   | value | 8 | ------------------------------ 9 | | order_id | limei  | 1     | 10 | | goods    | limei  | 青菜  | 11 | | age      | limei  | 18    | 12 | | order_id | wanzhi | 2     | 13 | | goods    | wanzhi | 白菜  | 14 | | age      | wanzhi | 22    | 15 | | order_id | wanzhi | 3     | 16 | | goods    | wanzhi | 青菜  | 17 | | age      | wanzhi | 22    | 18 | ------------------------------ 19 | 20 | transform$h4v('key', 'value', 'name') turns it into: 21 | 22 | ---------------------------------- 23 | | name   | order_id | goods | age | 24 | ---------------------------------- 25 | | limei  | 1        | 青菜  | 18  | 26 | | wanzhi | 2        | 白菜  | 22  | 27 | | wanzhi | 3        | 青菜  | 22  | 28 | ---------------------------------- 29 | */ 30 | 31 | insert into v4h_data1 select transform$v4h('key', 'value') from ( 32 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 33 | ); 34 | 35 | select transform$h4v('key', 'value') from ( 36 | select `key`, `value` from v4h_data1 37 | ); 38 | 39 | insert into v4h_data2 select transform$v4h('key', 'value', 'site_id') from ( 40 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 41 | ); 42 | 43 | select transform$h4v('key', 'value', 'site_id') from ( 44 | select `key`, `value`, `site_id` from v4h_data2 45 | ); 46 | 47 | insert into test_data1 (`key`, `name`, `value`) values ('order_id', 'limei', '1'), ('goods', 'limei', '青菜'), ('age', 'limei', '18'), 48 | ('order_id', 'wanzhi', '2'), ('goods', 'wanzhi', '白菜'), ('age', 'wanzhi', '22'), 49 | ('order_id', 'wanzhi', '3'), ('goods', 'wanzhi', '青菜'), ('age', 'wanzhi', '22'); 50 | 51 | select transform$h4v('key', 'value', 'name') from ( 52 | select `key`, `name`, `value` from test_data1 53 | ); 54 | 55 | insert into test_data2 (`key`, `value`) values ('order_id', '1'), ('name', 'limei'), ('goods', '青菜'), ('age', '18'), 56 | ('order_id', '2'), ('name', 'wanzhi'), ('goods', '白菜'), ('age', '22'), 57 | ('order_id', '3'), ('name', 'wanzhi'), ('goods', '青菜'), ('age', '22'); 58 | 59 | select transform$h4v('key', 'value') from ( 60 | select `key`, `value` from test_data2 61 | );
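62 | 63 | -- Expected shape of the last query, as a sketch: this assumes a repeated key 64 | -- (order_id here) starts a new output row, matching the worked example at the 65 | -- top of this file: 66 | -- | order_id | name   | goods | age | 67 | -- | 1        | limei  | 青菜  | 18  | 68 | -- | 2        | wanzhi | 白菜  | 22  | 69 | -- | 3        | wanzhi | 青菜  | 22  |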
-------------------------------------------------------------------------------- /examples/transform/transform_uniqkv.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Deduplicate rows, expanding the duplicates horizontally. 4 | Argument 1: key column used to detect duplicate rows 5 | Argument 2: column whose values become the new horizontal headers 6 | Argument 3: value column; numeric values landing in the same cell are summed, otherwise the last value wins; with no third argument the occurrences are counted 7 | 8 | Given the following table: 9 | --------------------------------------------- 10 | | id | name   | order_date | amount | goods | 11 | --------------------------------------------- 12 | | 1  | limei  | 2022-01-01 | 5.5    | 青菜  | 13 | | 2  | wanzhi | 2022-01-01 | 8.2    | 白菜  | 14 | | 3  | wanzhi | 2022-01-01 | 2.2    | 青菜  | 15 | --------------------------------------------- 16 | 17 | transform$uniqkv('order_date', 'name', 'amount') turns it into: 18 | 19 | ------------------------------------------------------------- 20 | | id | name  | order_date | amount | goods | limei | wanzhi | 21 | ------------------------------------------------------------- 22 | | 1  | limei | 2022-01-01 | 5.5    | 青菜  | 5.5   | 10.4   | 23 | ------------------------------------------------------------- 24 | */ 25 | 26 | select transform$uniqkv('create_date', 'name', 'amount') from ( 27 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 28 | ); 29 | 30 | insert into test_data (id, name, order_date, amount, goods) values (1, 'limei', '2022-01-01', 5.5, '青菜'), (2, 'wanzhi', '2022-01-01', 8.2, '白菜'), (3, 'wanzhi', '2022-01-01', 2.2, '青菜'); 31 | 32 | select transform$uniqkv('order_date', 'name', 'amount') from ( 33 | select id, name, order_date, amount, goods from test_data 34 | ); 35 | 36 | -- Without the third argument the duplicates are counted 37 | select transform$uniqkv('order_date', 'name') from ( 38 | select id, name, order_date, amount, goods from test_data 39 | ); 40 | 41 | -- Converted to strings the values are no longer summed; the last value wins 42 | select transform$uniqkv('order_date', 'name', 'amount') from ( 43 | select id, name, order_date, convert_string(amount, '%.02f') as amount, goods from test_data 44 | );
-------------------------------------------------------------------------------- /examples/transform/transform_v2h.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Convert a vertical (long) table into a horizontal (wide) table. 4 | Argument 1: column whose values become the new horizontal headers 5 | Argument 2: the vertical key column 6 | Argument 3: value column; numeric values landing in the same cell are summed, otherwise the last value wins; with no third argument the occurrences are counted 7 | 8 | Given the following table: 9 | --------------------------------------------- 10 | | id | name   | order_date | amount | goods | 11 | --------------------------------------------- 12 | | 1  | limei  | 2022-01-01 | 5.5    | 青菜  | 13 | | 2  | wanzhi | 2022-01-01 | 8.2    | 白菜  | 14 | | 3  | wanzhi | 2022-01-01 | 2.2    | 青菜  | 15 | --------------------------------------------- 16 | 17 | transform$v2h('name', 'order_date', 'amount') turns it into (wanzhi's two amounts are summed): 18 | 19 | -------------------------------- 20 | | order_date | limei | wanzhi | 21 | -------------------------------- 22 | | 2022-01-01 | 5.5   | 10.4   | 23 | -------------------------------- 24 | */ 25 | 26 | select transform$v2h('name', 'create_date', 'amount') from ( 27 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 28 | ); 29 | 30 | select transform$v2h('name', 'create_date') from ( 31 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 32 | ); 33 | 34 | insert into test_data (id, name, order_date, amount, goods) values (1, 'limei', '2022-01-01', 5.5, '青菜'), (2, 'wanzhi', '2022-01-01', 8.2, '白菜'), (3, 'wanzhi', '2022-01-01', 2.2, '青菜'); 35 | 36 | select transform$v2h('name', 'order_date', 'amount') from ( 37 | select id, name, order_date, amount from test_data 38 | ); 39 | 40 | -- Without the third argument the occurrences are counted 41 | select transform$v2h('name', 'order_date') from ( 42 | select id, name, order_date, amount from test_data 43 | ); 44 | 45 | -- For non-numeric values the last value wins 46 | select transform$v2h('name', 'order_date', 'goods') from ( 47 | select id, name, order_date, goods from test_data 48 | ); 49 | 50 | -- Convert to strings to keep the last value instead of summing 51 | select transform$v2h('name', 'order_date', 'amount') from ( 52 | select id, name, order_date, convert_string(amount, '%.02f') as amount from test_data 53 | );
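54 | 55 | -- Expected shape of the counting form transform$v2h('name', 'order_date') on 56 | -- test_data, as a sketch derived from the description above (with no third 57 | -- argument each cell is a row count, so limei counts 1 and wanzhi counts 2): 58 | -- | order_date | limei | wanzhi | 59 | -- | 2022-01-01 | 1     | 2      |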
-------------------------------------------------------------------------------- /examples/transform/transform_v4h.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Convert a table into vertical KEY-VALUE rows. 4 | Argument 1: name of the output key column 5 | Argument 2: name of the output value column 6 | Optional argument 3: column kept as-is on every output row (see the example below) 7 | 8 | Given the following table: 9 | -------------------- 10 | | id | name   | age | 11 | -------------------- 12 | | 1  | limei  | 18  | 13 | | 2  | wanzhi | 22  | 14 | -------------------- 15 | 16 | transform$v4h('key', 'value', 'name') turns it into: 17 | 18 | ------------------------- 19 | | key | value | name   | 20 | ------------------------- 21 | | id  | 1     | limei  | 22 | | age | 18    | limei  | 23 | | id  | 2     | wanzhi | 24 | | age | 22    | wanzhi | 25 | ------------------------- 26 | */ 27 | 28 | select transform$v4h('key', 'value') from ( 29 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 30 | ); 31 | 32 | select transform$v4h('key', 'value', 'site_id') from ( 33 | select a.order_id, a.site_id, a.amount, a.create_date, b.name from `data/data.json` a join `data/sites.json` b on a.site_id=b.site_id 34 | ); 35 | 36 | insert into test_data (id, name, age) values (1, 'limei', 18), (2, 'wanzhi', 22); 37 | 38 | select transform$v4h('key', 'value') from ( 39 | select id, name, age from test_data 40 | ); 41 | 42 | select transform$v4h('key', 'value', 'name') from ( 43 | select id, name, age from test_data 44 | );
-------------------------------------------------------------------------------- /examples/type_annotation/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snower/syncany-sql/2006aa0274be42bbe55dd0279c076e4d684ccaa6/examples/type_annotation/README.md
-------------------------------------------------------------------------------- /examples/type_annotation/data/data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "userId": 123, 4 | "order": { 5 | "orderId": 3243243, 6 | "orderAt": "2023-02-23 10:23:21", 7 | "goodses": [ 8 | { 9 | "goodsId": "6422a08fb4055348da72633b", 10 | "goodsName": "商品1" 11 | }, 12 | { 13 | "goodsId": "6422a0a4b4055348da72633c", 14 | "goodsName": "商品2" 15 | } 16 | ], 17 | "payInfo": { 18 | "payId": "da984ae6-cd3f-11ed-af1b-eb91b1b4fa12", 19 | "payTypes": [1, 3, 4], 20 | "amount": 12.9, 21 | "payAt": "2023-02-23 10:23:59" 22 | }, 23 | "channel.:0": "weixin", 24 | "channel.:1": "ali", 25 | "refundInfo": [ 26 | { 27 | "refundId": "e10907bc-cd3f-11ed-af1b-eb91b1b4fa12", 28 | "amount": 11.3, 29 | "payAt": "2023-02-24 01:23:59" 30 | } 31 | ], 32 | "address.mobile": "12345678911" 33 | }, 34 | "user.username": "snower", 35 | "user.nickname": "哈哈" 36 | } 37 | ]
-------------------------------------------------------------------------------- /examples/type_annotation/type_annotation.sql: -------------------------------------------------------------------------------- 1 | 2 | insert into `orders` select * from `data/data.json`; 3 | 4 | -- String 5 | select type(`order.orderId[str]`) as t, `order.orderId[str]` as v from orders; 6 | -- Number formatting 7 | select type(`order.payInfo.amount[str %.02f]`) as t, `order.payInfo.amount[str %.02f]` as v from orders; 8 | 9 | -- Datetime 10 | select type(`order.orderAt[datetime]`) as t, `order.orderAt[datetime]` as v from orders; 11 | -- Format a datetime as a string 12 | select `orderAt`, type(`orderAt[str %Y-%m-%d]`) as t, `orderAt[str %Y-%m-%d]` as v from ( 13 | select `order.orderAt[datetime]` as orderAt from orders 14 | ) a; 15 | -- Convert a datetime to a timestamp 16 | select `orderAt`, type(`orderAt[int]`) as t, `orderAt[int]` as v from ( 17 | select `order.orderAt[datetime]` as orderAt from orders 18 | ) a; 19 | 20 | -- ObjectId: when joining against MongoDB, cast to the matching type first 21 | select type(`goods.goodsId[objectid]`) as t, `goods.goodsId[objectid]` as v from ( 22 | select yield_array(`order.goodses`) as goods from orders 23 | ) a; 24 | 25 | -- UUID 26 | select type(`order.payInfo.payId[uuid]`) as t, `order.payInfo.payId[uuid]` as v from orders; 27 | -- Convert a UUID to a number 28 | select `payId`, type(`payId[int]`) as t, `payId[int]` as v from ( 29 | select `order.payInfo.payId[uuid]` as payId from orders 30 | ) a; 31 | 32 | -- Convert to bool 33 | select type(`order.orderId[bool]`) as t, `order.orderId[bool]` as v from orders; 34 | select `v1[bool]` as v1, `v2[bool]` as v2, `v3[bool]` as v3, `v4[bool]` as v4, `v5[bool]` as v5 from ( 35 | select 0 as v1, 1 as v2, '' as v3, 'a' as v4, null as v5 36 | ) a;
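37 | 38 | -- Expected values for the bool conversions above, as a sketch assuming the 39 | -- engine follows Python-style truthiness: v1=false, v2=true, v3=false, 40 | -- v4=true, v5=false.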
-------------------------------------------------------------------------------- /examples/type_annotation/type_declaration_cast.sql: -------------------------------------------------------------------------------- 1 | 2 | insert into `orders` select * from `data/data.json`; 3 | 4 | -- String 5 | select type(`order.orderId`::varchar) as t, `order.orderId`::varchar as v from orders; 6 | -- Number formatting 7 | select type(`order.payInfo.amount[str %.02f]`) as t, `order.payInfo.amount[str %.02f]` as v from orders; 8 | 9 | -- Datetime 10 | select type(`order.orderAt`::datetime) as t, `order.orderAt`::datetime as v from orders; 11 | -- Format a datetime as a string 12 | select `orderAt`, type(`orderAt[str %Y-%m-%d]`) as t, `orderAt[str %Y-%m-%d]` as v from ( 13 | select `order.orderAt`::datetime as orderAt from orders 14 | ) a; 15 | -- Convert a datetime to a timestamp 16 | select `orderAt`, type(`orderAt`::int) as t, `orderAt`::int as v from ( 17 | select `order.orderAt`::datetime as orderAt from orders 18 | ) a; 19 | 20 | -- ObjectId: when joining against MongoDB, cast to the matching type first 21 | select type(`goods.goodsId[objectid]`) as t, `goods.goodsId[objectid]` as v from ( 22 | select yield_array(`order.goodses`) as goods from orders 23 | ) a; 24 | 25 | -- UUID 26 | select type(`order.payInfo.payId`::uuid) as t, `order.payInfo.payId`::uuid as v from orders; 27 | -- Convert a UUID to a number 28 | select `payId`, type(`payId`::int) as t, `payId`::int as v from ( 29 | select `order.payInfo.payId`::uuid as payId from orders 30 | ) a; 31 | 32 | -- Convert to bool 33 | select type(`order.orderId`::bool) as t, `order.orderId`::bool as v from orders; 34 | select `v1`::bool as v1, `v2`::bool as v2, `v3`::bool as v3, `v4`::bool as v4, `v5`::bool as v5 from ( 35 | select 0 as v1, 1 as v2, '' as v3, 'a' as v4, null as v5 36 | ) a;
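37 | 38 | -- Note: the ::type casts used in this file and the [type] annotations used in 39 | -- type_annotation.sql are two spellings of the same conversion, so these two 40 | -- queries are equivalent: 41 | -- select `order.orderId`::varchar as v from orders; 42 | -- select `order.orderId[str]` as v from orders;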
-------------------------------------------------------------------------------- /examples/window_aggregate/data/goodses.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "goods_id": 1, 5 | "goods_name": "青菜", 6 | "status": 0 7 | }, 8 | { 9 | "uid": 1, 10 | "goods_id": 2, 11 | "goods_name": "白菜", 12 | "status": 0 13 | }, 14 | { 15 | "uid": 2, 16 | "goods_id": 3, 17 | "goods_name": "萝卜", 18 | "status": 0 19 | } 20 | ]
-------------------------------------------------------------------------------- /examples/window_aggregate/data/orders.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "order_id": 1, 4 | "uid": 2, 5 | "goods_id": 1, 6 | "amount": 9.6, 7 | "status": 0 8 | }, 9 | { 10 | "order_id": 2, 11 | "uid": 1, 12 | "goods_id": 1, 13 | "amount": 7.6, 14 | "status": 0 15 | }, 16 | { 17 | "order_id": 3, 18 | "uid": 2, 19 | "goods_id": 2, 20 | "amount": 3, 21 | "status": 0 22 | }, 23 | { 24 | "order_id": 4, 25 | "uid": 1, 26 | "goods_id": 1, 27 | "amount": 8, 28 | "status": 0 29 | }, 30 | { 31 | "order_id": 5, 32 | "uid": 2, 33 | "goods_id": 1, 34 | "amount": 8, 35 | "status": 0 36 | }, 37 | { 38 | "order_id": 6, 39 | "uid": 2, 40 | "goods_id": 2, 41 | "amount": 7, 42 | "status": 0 43 | } 44 | ]
-------------------------------------------------------------------------------- /examples/window_aggregate/data/users.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uid": 1, 4 | "name": "王五", 5 | "gender": "男", 6 | "status": 0 7 | }, 8 | { 9 | "uid": 2, 10 | "name": "李四", 11 | "gender": "女", 12 | "status": 0 13 | } 14 | ]
-------------------------------------------------------------------------------- /examples/window_aggregate/window_customize.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/25 3 | # create by: snower 4 | 5 | from syncanysql.calculaters import StateAggregateCalculater, WindowStateAggregateCalculater, register_calculater 6 | 7 | 8 | @register_calculater("window_aggregate_unique") 9 | class UniqueSetWindowAggregateCalculater(StateAggregateCalculater): 10 | def aggregate(self, state_value, data_value): 11 | if data_value is None: 12 | return state_value 13 | if state_value is None: 14 | return {data_value} 15 | state_value.add(data_value) 16 | return state_value 17 | 18 | def reduce(self, state_value, data_value): 19 | if data_value is None: 20 | return state_value 21 | if state_value is None: 22 | return data_value 23 | return state_value | data_value 24 | 25 | def final_value(self, state_value): 26 | return state_value 27 | 28 | 29 | @register_calculater("window_aggregate_join") 30 | class JoinArrayWindowAggregateCalculater(WindowStateAggregateCalculater): 31 | def aggregate(self, state_value, data_value, context): 32 | if data_value is None: 33 | return state_value 34 | if state_value is None: 35 | return [str(data_value)] 36 | state_value.append(str(data_value)) 37 | return state_value 38 | 39 | def order_aggregate(self, state_value, data_value, context): 40 | if data_value is None: 41 | return state_value 42 | if state_value is None: 43 | return [str(data_value)] 44 | state_value.append(str(data_value)) 45 | return state_value 46 | 47 | def final_value(self, state_value): 48 | if not state_value: 49 | return "" 50 | return ",".join(state_value) 51 |
-------------------------------------------------------------------------------- /examples/yield_data/README.md: -------------------------------------------------------------------------------- 1 | # Expanding arrays iteratively with yield
-------------------------------------------------------------------------------- /examples/yield_data/generate_customize.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/5/30 3 | # create by: snower 4 | 5 | from syncanysql.calculaters import GenerateCalculater, register_calculater 6 | 7 | 8 | @register_calculater("range_count") 9 | class RangeGenerateCalculater(GenerateCalculater): 10 | def calculate(self, count): 11 | for i in range(count): 12 | yield i 13 | 14 | 15 | def range_count(count): 16 | for i in range(count): 17 | yield i
-------------------------------------------------------------------------------- /examples/yield_data/yield_data.sql: -------------------------------------------------------------------------------- 1 | use `generate_customize`; 2 | 3 | set @aaa=(1, 2, 3); 4 | 5 | select yield_array(('a', 'b', 'c')) as a, yield_array(@aaa) as b; 6 | 7 | insert into test_data (id, name, order_date, amount, goods) values (1, 'limei', '2022-01-01', 5.5, '青菜'), (2, 'wanzhi', '2022-01-01', 8.2,
'白菜'), (3, 'wanzhi', '2022-01-01', 2.2, '青菜'); 8 | 9 | select goods into @bbb from test_data; 10 | 11 | select yield_array(select goods from test_data) as a, yield_array(@bbb) as b; 12 | 13 | select yield_array(data) as a from ( 14 | select json$decode("[1, 2, 3]") as data 15 | ) t; 16 | 17 | select range_count(3), generate_customize$range_count(4);
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pytz>=2018.5 2 | tzlocal>=1.5.1 3 | pyyaml>=5.1.2 4 | pendulum>=2.1.2 5 | sqlglot>=11.5.5,<12 6 | Pygments>=2.14.0 7 | prompt-toolkit>=3.0.36 8 | rich>=9.11.1 9 | 10 | pymongo>=3.6.1 11 | PyMySQL>=0.8.1 12 | openpyxl>=2.5.0 13 | psycopg2-binary>=2.8.6 14 | elasticsearch>=6.3.1 15 | influxdb>=5.3.1 16 | clickhouse_driver>=0.1.5 17 | redis>=3.5.3 18 | requests>=2.22.0 19 | pymssql>=2.2.7 20 | prql-python>=0.11.1 21 | 22 | # Items below this point will not be included in the Docker Image 23 | 24 | ## Dev 25 | coverage 26 | 27 | # Testing dependencies 28 | pytest 29 | pytest-benchmark 30 | pytest-cov 31 | pytest-integration
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 23/02/07 3 | # create by: snower 4 | 5 | import sys 6 | import os 7 | from setuptools import find_packages, setup 8 | 9 | version = "0.1.24" 10 | 11 | if os.path.exists("README.md"): 12 | if sys.version_info[0] >= 3: 13 | try: 14 | with open("README.md", encoding="utf-8") as fp: 15 | long_description = fp.read() 16 | except Exception as e: 17 | print("Warning: " + str(e)) 18 | long_description = 'https://github.com/snower/syncany-sql' 19 | else: 20 | try: 21 | with open("README.md") as fp: 22 | long_description = fp.read() 23 | except Exception as e: 24 | print("Warning: " + str(e)) 25 | long_description = 'https://github.com/snower/syncany-sql' 26 | else: 27 | long_description = 'https://github.com/snower/syncany-sql' 28 | 29 | setup( 30 | name='syncanysql', 31 | version=version, 32 | url='https://github.com/snower/syncany-sql', 33 | author='snower', 34 | author_email='sujian199@gmail.com', 35 | license='MIT', 36 | packages=find_packages(exclude=['*tests*']), 37 | zip_safe=False, 38 | install_requires=[ 39 | "pyyaml>=6.0", 40 | "sqlglot>=11.5.5,<12", 41 | "syncany>=0.2.25", 42 | 'Pygments>=2.14.0', 43 | 'prompt-toolkit>=3.0.36', 44 | "rich>=9.11.1", 45 | ], 46 | extras_require={ 47 | "pymongo": ['pymongo>=3.6.1'], 48 | "pymysql": ['PyMySQL>=0.8.1'], 49 | "openpyxl": ["openpyxl>=2.5.0"], 50 | "postgresql": ["psycopg2>=2.8.6"], 51 | "elasticsearch": ["elasticsearch>=6.3.1"], 52 | "influxdb": ["influxdb>=5.3.1"], 53 | "clickhouse": ["clickhouse_driver>=0.1.5"], 54 | "redis": ["redis>=3.5.3"], 55 | "requests": ["requests>=2.22.0"], 56 | "pymssql": ['pymssql>=2.2.7'], 57 | "prql-python": ["prql-python>=0.11.1"], 58 | }, 59 | package_data={ 60 | '': ['README.md'], 61 | }, 62 | entry_points={ 63 | 'console_scripts': [ 64 | 'syncany-sql = syncanysql.main:main', 65 | ], 66 | }, 67 | description='Simple and easy-to-use sql execution engine', 68 | long_description=long_description, 69 | long_description_content_type='text/markdown'
70 | ) 71 | -------------------------------------------------------------------------------- /syncanysql/calculaters/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/3/2 3 | # create by: snower 4 | 5 | import os 6 | import types 7 | from syncany.calculaters import CALCULATERS, Calculater, TypeFormatCalculater, TypingCalculater, MathematicalCalculater 8 | from syncany.calculaters import register_calculater, find_calculater, CalculaterUnknownException 9 | from syncany.calculaters.import_calculater import ImportCalculater 10 | from .env_variable_calculater import CurrentEnvVariableCalculater 11 | from .query_tasker_calculater import ExecuteQueryTaskerCalculater 12 | from .row_calculater import * 13 | from .mysql_calculater import MysqlCalculater, register_mysql_func 14 | from .generate_calculater import * 15 | from .aggregate_calculater import * 16 | from .window_calculater import * 17 | 18 | SQL_CALCULATERS = { 19 | "current_env_variable": CurrentEnvVariableCalculater, 20 | "row_index": RowIndexCalculater, 21 | "row_last": RowLastCalculater, 22 | "mysql": MysqlCalculater, 23 | "aggregate_key": AggregateKeyCalculater, 24 | "aggregate_count": AggregateCountCalculater, 25 | "aggregate_distinct_count": AggregateDistinctCountCalculater, 26 | "aggregate_sum": AggregateSumCalculater, 27 | "aggregate_max": AggregateMaxCalculater, 28 | "aggregate_min": AggregateMinCalculater, 29 | "aggregate_avg": AggregateAvgCalculater, 30 | "yield_array": GenerateYieldArrayCalculater, 31 | "group_concat": AggregateGroupConcatCalculater, 32 | "group_array": AggregateGroupArrayCalculater, 33 | "group_uniq_array": AggregateGroupUniqArrayCalculater, 34 | "group_bit_and": AggregateGroupBitAndCalculater, 35 | "group_bit_or": AggregateGroupBitOrCalculater, 36 | "group_bit_xor": AggregateGroupBitXorCalculater, 37 | "json_arrayagg": AggregateJsonArrayaggCalculater, 38 | "json_objectagg": AggregateJsonObjectaggCalculater, 39 | "row_number": WindowAggregateRowNumberCalculater, 40 | "rank": WindowAggregateRankCalculater, 41 | "dense_rank": WindowAggregateDenseRankCalculater, 42 | "percent_rank": WindowAggregatePercentRankCalculater, 43 | "cume_dist": WindowAggregateCumeDistCalculater, 44 | "first_value": WindowAggregateFirstValueCalculater, 45 | "last_value": WindowAggregateLastValueCalculater, 46 | "nth_value": WindowAggregateNthValueCalculater, 47 | "ntile": WindowAggregateNtileCalculater, 48 | "lag": WindowAggregateLagCalculater, 49 | "lead": WindowAggregateLeadCalculater, 50 | "execute_query_tasker": ExecuteQueryTaskerCalculater, 51 | } 52 | CALCULATERS.update(SQL_CALCULATERS) 53 | if not os.environ.get("SYNCANY_PYEVAL_DISABLED"): 54 | from .pyeval_calculater import PyEvalCalculater, register_pyeval_module 55 | CALCULATERS["pyeval"] = PyEvalCalculater 56 | 57 | 58 | def is_mysql_func(name): 59 | if MysqlCalculater.funcs is None: 60 | MysqlCalculater.find_func(name) 61 | return name in MysqlCalculater.funcs 62 | 63 | 64 | def find_generate_calculater(name): 65 | calculater = find_calculater(name) 66 | if issubclass(calculater, ImportCalculater): 67 | try: 68 | import_calculater = ImportCalculater(name) 69 | if isinstance(import_calculater.module_or_func, (types.FunctionType, types.LambdaType)): 70 | if import_calculater.module_or_func.__code__.co_flags & 0x20 != 0: 71 | return calculater 72 | except: 73 | pass 74 | raise CalculaterUnknownException("%s is unknown generate calculater" % name) 75 | if not issubclass(calculater, 
GenerateCalculater): 76 | raise CalculaterUnknownException("%s is unknown generate calculater" % name) 77 | return calculater 78 | 79 | 80 | def find_aggregate_calculater(name): 81 | calculater = find_calculater(name) 82 | if not issubclass(calculater, AggregateCalculater): 83 | raise CalculaterUnknownException("%s is unknown aggregate calculater" % name) 84 | return calculater 85 | 86 | 87 | def find_window_aggregate_calculater(name): 88 | calculater = find_calculater(name) 89 | if not issubclass(calculater, WindowAggregateCalculater): 90 | raise CalculaterUnknownException("%s is unknown window aggregate calculater" % name) 91 | return calculater -------------------------------------------------------------------------------- /syncanysql/calculaters/env_variable_calculater.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/5/26 3 | # create by: snower 4 | 5 | from syncany.calculaters.calculater import Calculater 6 | 7 | 8 | class CurrentEnvVariableCalculater(Calculater): 9 | def __init__(self, *args): 10 | super(CurrentEnvVariableCalculater, self).__init__(*args) 11 | 12 | if self.name[22:] == "get_value": 13 | self.func = self.get_value 14 | elif self.name[22:] == "set_value": 15 | self.func = self.set_value 16 | else: 17 | self.func = lambda *args: None 18 | 19 | def calculate(self, *args): 20 | return self.func(*args) 21 | 22 | def get_value(self, key): 23 | from ..executor import Executor 24 | current_executor = Executor.current() 25 | if not current_executor: 26 | return None 27 | try: 28 | return current_executor.env_variables.get_value(key) 29 | except KeyError: 30 | return None 31 | 32 | def set_value(self, key, value): 33 | from ..executor import Executor 34 | current_executor = Executor.current() 35 | if current_executor: 36 | current_executor.env_variables[key] = value 37 | return value 38 | 39 | def is_realtime_calculater(self): 40 | return True 41 | -------------------------------------------------------------------------------- /syncanysql/calculaters/generate_calculater.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/5/30 3 | # create by: snower 4 | 5 | from syncany.calculaters.calculater import Calculater 6 | 7 | 8 | class GenerateCalculater(Calculater): 9 | pass 10 | 11 | 12 | class GenerateYieldArrayCalculater(GenerateCalculater): 13 | def calculate(self, values): 14 | if isinstance(values, list): 15 | for value in values: 16 | yield value 17 | else: 18 | yield values 19 | -------------------------------------------------------------------------------- /syncanysql/calculaters/mysql_funcs/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/3/2 3 | # create by: snower 4 | 5 | from . import number_funcs 6 | from . import string_funcs 7 | from . import datetime_funcs 8 | from . import logical_funcs 9 | from . import json_funcs 10 | from . 
import regexp_funcs 11 | 12 | funcs = {} 13 | funcs.update(number_funcs.funcs) 14 | funcs.update(string_funcs.funcs) 15 | funcs.update(datetime_funcs.funcs) 16 | funcs.update(logical_funcs.funcs) 17 | funcs.update(json_funcs.funcs) 18 | funcs.update(regexp_funcs.funcs) -------------------------------------------------------------------------------- /syncanysql/calculaters/mysql_funcs/regexp_funcs.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2025/3/21 3 | # create by: snower 4 | 5 | import re 6 | from syncany.calculaters import typing_filter 7 | 8 | def parse_flags(match_type): 9 | if not match_type: 10 | return re.IGNORECASE 11 | if match_type.isdigit(): 12 | return int(match_type) | re.IGNORECASE 13 | flags = re.IGNORECASE 14 | for c in match_type.lower(): 15 | if c == 'c': 16 | flags = flags & (~re.IGNORECASE) 17 | elif c == 'i': 18 | flags = flags | re.IGNORECASE 19 | elif c == 'm': 20 | flags = flags | re.MULTILINE 21 | elif c == 'n': 22 | flags = flags | re.DOTALL 23 | elif c == 'u': 24 | flags = flags | re.UNICODE 25 | return flags 26 | 27 | @typing_filter(int) 28 | def mysql_regexp(s, r, match_type=None): 29 | if r is None or s is None: 30 | return None 31 | try: 32 | return 1 if re.match(r, s, parse_flags(match_type)) else 0 33 | except: 34 | return 0 35 | 36 | @typing_filter(int) 37 | def mysql_regexplike(s, r, match_type=None): 38 | if r is None or s is None: 39 | return None 40 | try: 41 | return 1 if re.match(r, s, parse_flags(match_type)) else 0 42 | except: 43 | return 0 44 | 45 | @typing_filter(int) 46 | def mysql_regexp_instr(s, r, pos=1, occurrence=1, return_option=0, match_type=None): 47 | if r is None or s is None: 48 | return None 49 | try: 50 | flags = parse_flags(match_type) 51 | cs = s[pos-1:] 52 | while cs: 53 | m = re.search(r, cs, flags) 54 | if not m: 55 | return 0 56 | start_index, end_index = m.span() 57 | if occurrence <= 1: 58 | return pos + (end_index if return_option else start_index) 59 | cs = cs[end_index:] 60 | occurrence -= 1 61 | pos += end_index 62 | return 0 63 | except: 64 | return 0 65 | 66 | @typing_filter(str) 67 | def mysql_regexp_replace(s, r, rs, pos=1, occurrence=0, match_type=None): 68 | if r is None or s is None or rs is None: 69 | return None 70 | try: 71 | flags = parse_flags(match_type) 72 | cs = s[pos-1:] 73 | if occurrence <= 0: 74 | return re.sub(r, rs, cs, count=0, flags=flags) 75 | while cs: 76 | m = re.search(r, cs, flags) 77 | if not m: 78 | return s 79 | start_index, end_index = m.span() 80 | if occurrence <= 1: 81 | return s[:pos + start_index - 1] + rs + s[pos + end_index - 1:] 82 | cs = cs[end_index:] 83 | occurrence -= 1 84 | pos += end_index 85 | return s 86 | except: 87 | return None 88 | 89 | @typing_filter(str) 90 | def mysql_regexp_substr(s, r, pos=1, occurrence=1, match_type=None): 91 | if r is None or s is None: 92 | return None 93 | try: 94 | flags = parse_flags(match_type) 95 | cs = s[pos-1:] 96 | while cs: 97 | m = re.search(r, cs, flags) 98 | if not m: 99 | return None 100 | start_index, end_index = m.span() 101 | if occurrence <= 1: 102 | return cs[start_index:end_index] 103 | cs = cs[end_index:] 104 | occurrence -= 1 105 | pos += end_index 106 | return None 107 | except: 108 | return None 109 | 110 | @typing_filter(int) 111 | def mysql_like(s, r, match_type=None): 112 | if r is None or s is None: 113 | return None 114 | r = ".*" if r == '%%' else "".join([rg.replace("%", ".*") for rg in re.escape(r).split("%%")]) 115 | try: 116 | return 1 
if re.match(r, s, parse_flags(match_type)) else 0 117 | except: 118 | return 0 119 | 120 | funcs = {key[6:]: value for key, value in globals().items() if key.startswith("mysql_")} -------------------------------------------------------------------------------- /syncanysql/calculaters/pyeval_calculater.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2025/4/3 3 | # create by: snower 4 | 5 | import traceback 6 | from syncany.logger import get_logger 7 | from syncany.calculaters.calculater import Calculater 8 | 9 | 10 | class PyEvalCalculater(Calculater): 11 | globals = None 12 | 13 | @classmethod 14 | def init_globals(cls): 15 | import sys 16 | import os 17 | import datetime 18 | import time 19 | import math 20 | import random 21 | import string 22 | import uuid 23 | import base64 24 | import hashlib 25 | import pickle 26 | import json 27 | import re 28 | try: 29 | import requests 30 | except: 31 | requests = None 32 | 33 | cls.globals = { 34 | "sys": sys, 35 | "os": os, 36 | "datetime": datetime, 37 | "time": time, 38 | "math": math, 39 | "random": random, 40 | "string": string, 41 | "uuid": uuid, 42 | "base64": base64, 43 | "hashlib": hashlib, 44 | "pickle": pickle, 45 | "json": json, 46 | "re": re, 47 | } 48 | if requests is not None: 49 | cls.globals["requests"] = requests 50 | 51 | def __init__(self, *args, **kwargs): 52 | Calculater.__init__(self, *args, **kwargs) 53 | 54 | if self.globals is None: 55 | self.init_globals() 56 | 57 | def calculate(self, *args): 58 | if not args or not args[0]: 59 | return None 60 | try: 61 | return eval(args[0], self.globals, {"args": args[1:]}) 62 | except Exception as e: 63 | get_logger().warning("pyeval calculater execute %s error: %s\n%s", args, e, traceback.format_exc()) 64 | return None 65 | 66 | 67 | def register_pyeval_module(name, module): 68 | if PyEvalCalculater.globals is None: 69 | PyEvalCalculater.init_globals() 70 | PyEvalCalculater.globals[name] = module 71 | -------------------------------------------------------------------------------- /syncanysql/calculaters/query_tasker_calculater.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2025/3/24 3 | # create by: snower 4 | 5 | import copy 6 | from syncany.taskers.tasker import _thread_local 7 | from syncany.calculaters.calculater import LoaderCalculater 8 | 9 | 10 | class ExecuteQueryTaskerCalculater(LoaderCalculater): 11 | def __init__(self, *args, **kwargs): 12 | super(ExecuteQueryTaskerCalculater, self).__init__(*args, **kwargs) 13 | 14 | self.config = None 15 | self.arguments = None 16 | self.executor = None 17 | self.tasker = None 18 | self.tasker_index = 0 19 | 20 | def start(self, tasker, loader, arguments, task_config, **kwargs): 21 | current_tasker = _thread_local.current_tasker 22 | try: 23 | self.config = task_config 24 | kn, knl = (task_config["name"] + "@"), len(task_config["name"] + "@") 25 | self.arguments = {} 26 | for key, value in arguments.items(): 27 | if key[:knl] != kn: 28 | continue 29 | self.arguments[key[knl:]] = value 30 | self.create_executor_tasker() 31 | finally: 32 | _thread_local.current_tasker = current_tasker 33 | 34 | def calculate(self, primary_keys, query, task_config, *args): 35 | current_tasker = _thread_local.current_tasker 36 | try: 37 | if self.executor is None: 38 | self.create_executor_tasker() 39 | with self.executor as executor: 40 | for exp, values in query["filters"].items(): 41 | if not exp: 42 | continue 
43 | for key, value in values: 44 | if not key or not isinstance(key, str): 45 | continue 46 | executor.env_variables["@" + key] = value 47 | database, collection_name = self.tasker.tasker.outputer.db, self.tasker.tasker.outputer.name 48 | executor.execute() 49 | query = database.query(collection_name, ["id"]) 50 | datas = query.commit() 51 | delete = database.delete(collection_name, ["id"]) 52 | delete.commit() 53 | return datas 54 | finally: 55 | self.executor, self.tasker = None, None 56 | _thread_local.current_tasker = current_tasker 57 | 58 | def create_executor_tasker(self): 59 | from ..executor import Executor 60 | from ..taskers.query import QueryTasker 61 | 62 | current_executor = Executor.current() 63 | if self.tasker_index <= 0: 64 | self.tasker_index = current_executor.distribute_tasker_index() 65 | with Executor(current_executor.manager, current_executor.session_config, current_executor) as executor: 66 | config = copy.deepcopy(self.config) 67 | config["output"] = "&.--.__queryTasker_" + str(id(executor)) + "::" + config["output"].split("::")[-1] 68 | config["name"] = config["name"] + "#queryTasker" 69 | tasker = QueryTasker(config, is_inner_subquery=True) 70 | tasker.tasker_index = self.tasker_index 71 | executor.runners.extend(tasker.start(config.get("name"), executor, executor.session_config, 72 | executor.manager, self.arguments)) 73 | self.tasker, self.executor = tasker, executor -------------------------------------------------------------------------------- /syncanysql/calculaters/row_calculater.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2024/2/2 3 | # create by: snower 4 | 5 | from syncany.calculaters.calculater import Calculater 6 | 7 | 8 | class RowIndexCalculater(Calculater): 9 | @classmethod 10 | def instance(cls, name): 11 | return RowIndexCalculater(name) 12 | 13 | def __init__(self, *args, **kwargs): 14 | super(RowIndexCalculater, self).__init__(*args, **kwargs) 15 | 16 | self.row_index = 0 17 | 18 | def calculate(self, *args): 19 | self.row_index += 1 20 | return self.row_index 21 | 22 | 23 | class RowLastCalculater(Calculater): 24 | @classmethod 25 | def instance(cls, name): 26 | return RowLastCalculater(name) 27 | 28 | def __init__(self, *args, **kwargs): 29 | super(RowLastCalculater, self).__init__(*args, **kwargs) 30 | 31 | self.row_last = None 32 | 33 | def calculate(self, *args): 34 | row_last, self.row_last = self.row_last, (args[0] if len(args) == 1 else args) 35 | return row_last 36 | -------------------------------------------------------------------------------- /syncanysql/errors.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/8 3 | # create by: snower 4 | 5 | from syncany.errors import SyncanyException 6 | 7 | class SyncanySqlException(SyncanyException): 8 | pass 9 | 10 | class SyncanySqlCompileException(SyncanySqlException): 11 | pass 12 | 13 | class SyncanySqlExecutorException(SyncanySqlException): 14 | pass -------------------------------------------------------------------------------- /syncanysql/main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/7 3 | # create by: snower 4 | 5 | import sys 6 | import os 7 | import signal 8 | import traceback 9 | import time 10 | from syncany.logger import get_logger 11 | from syncany.taskers.manager import TaskerManager 12 | from syncany.database.database import DatabaseManager 
13 | from .config import GlobalConfig 14 | from syncanysql.executor import Executor 15 | from .parser import SqlParser, FileParser 16 | 17 | 18 | def main(): 19 | if os.getcwd() not in sys.path: 20 | sys.path.insert(0, os.getcwd()) 21 | try: 22 | fileext = os.path.splitext(sys.argv[1])[1].lower() if len(sys.argv) >= 2 else None 23 | except: 24 | fileext = None 25 | if sys.stdin.isatty() and fileext and fileext not in (".sql", ".sqlx", ".prql"): 26 | print("usage: syncany [-h] sqlx|sql|prql") 27 | print("syncany error: require sqlx or sql or prql file") 28 | exit(2) 29 | 30 | try: 31 | if sys.platform != "win32": 32 | signal.signal(signal.SIGHUP, lambda signum, frame: executor.terminate()) 33 | signal.signal(signal.SIGTERM, lambda signum, frame: executor.terminate()) 34 | 35 | global_config = GlobalConfig() 36 | init_execute_files = global_config.load() 37 | if not sys.stdin.isatty() and (not fileext or fileext not in (".sql", ".sqlx", ".prql")): 38 | global_config.config_logging(False) 39 | else: 40 | global_config.config_logging(True) 41 | global_config.load_extensions() 42 | manager = TaskerManager(DatabaseManager()) 43 | 44 | try: 45 | with Executor(manager, global_config.session()) as executor: 46 | if init_execute_files: 47 | for init_execute_file in init_execute_files: 48 | file_parser = FileParser(init_execute_file) 49 | executor.run("init " + init_execute_file, file_parser.load()) 50 | executor.execute() 51 | 52 | if not sys.stdin.isatty() and (not fileext or fileext not in (".sql", ".sqlx", ".prql")): 53 | start_time = time.time() 54 | content = sys.stdin.read().strip() 55 | if not content: 56 | return 57 | sql_parser = SqlParser(content[1:-1] if content[0] in ('"', "'") and content[-1] in ('"', "'") else content) 58 | sqls = sql_parser.split() 59 | executor.run("pipe", sqls) 60 | get_logger().info("execute pipe sql start %d task", executor.get_tasker_index()) 61 | try: 62 | executor.execute() 63 | except Exception as e: 64 | get_logger().info("execute pipe sql finish with Exception %s:%s %.2fms", e.__class__.__name__, e, 65 | (time.time() - start_time) * 1000) 66 | raise e 67 | else: 68 | get_logger().info("execute pipe sql finish %.2fms", (time.time() - start_time) * 1000) 69 | elif len(sys.argv) >= 2: 70 | start_time = time.time() 71 | file_parser = FileParser(sys.argv[1]) 72 | sqls = file_parser.load() 73 | executor.run(sys.argv[1], sqls) 74 | get_logger().info("execute file %s start %d task", sys.argv[1], executor.get_tasker_index()) 75 | try: 76 | executor.execute() 77 | except Exception as e: 78 | get_logger().info("execute file %s finish with Exception %s:%s %.2fms", sys.argv[1], e.__class__.__name__, e, 79 | (time.time() - start_time) * 1000) 80 | raise e 81 | else: 82 | get_logger().info("execute file %s finish %.2fms", sys.argv[1], (time.time() - start_time) * 1000) 83 | else: 84 | from .prompt import CliPrompt 85 | cli_prompt = CliPrompt(manager, global_config.session(), executor) 86 | cli_prompt.run() 87 | finally: 88 | manager.close() 89 | except SystemError: 90 | get_logger().error("signal exited") 91 | exit(130) 92 | except KeyboardInterrupt: 93 | get_logger().error("Ctrl+C exited") 94 | exit(130) 95 | except Exception as e: 96 | get_logger().error("%s\n%s", e, traceback.format_exc()) 97 | exit(1) 98 | 99 | if __name__ == "__main__": 100 | main()
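101 | 102 | # Typical invocations (illustrative; they follow the dispatch above and the 103 | # console_scripts entry point declared in setup.py): 104 | #   syncany-sql examples/demo/demo.sql    # execute a .sql/.sqlx/.prql file 105 | #   echo "select now();" | syncany-sql    # execute SQL piped in via stdin 106 | #   syncany-sql                           # no file on a tty: interactive prompt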
-------------------------------------------------------------------------------- /syncanysql/taskers/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/13 3 | # create by: snower 4 |
-------------------------------------------------------------------------------- /syncanysql/taskers/delete.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/15 3 | # create by: snower 4 | 5 | from syncany.hook import Hooker 6 | from .query import QueryTasker 7 | 8 | 9 | class StreamingFollowHooker(Hooker): 10 | def __init__(self, manager, tasker): 11 | self.manager = manager 12 | self.tasker = tasker 13 | 14 | def loaded(self, tasker, datas): 15 | tasker.loader.set_streaming(False) 16 | return datas 17 | 18 | def outputed(self, tasker, datas): 19 | tasker.loader.set_streaming(True if tasker.outputer.is_streaming() else False) 20 | 21 | 22 | class DeleteTasker(object): 23 | def __init__(self, config): 24 | self.tasker = QueryTasker(config) 25 | 26 | def start(self, name, executor, session_config, manager, arguments): 27 | arguments["@limit"] = 0 28 | arguments["@batch"] = 0 29 | self.tasker.config["name"] = self.tasker.config["name"] + "#delete" 30 | taskers = self.tasker.start(name, executor, session_config, manager, arguments) 31 | self.tasker.tasker.add_hooker(StreamingFollowHooker(manager, self)) 32 | return taskers 33 | 34 | def run(self, executor, session_config, manager): 35 | try: 36 | return self.tasker.run(executor, session_config, manager) 37 | finally: 38 | self.tasker = None 39 | 40 | def terminate(self): 41 | if not self.tasker: 42 | return 43 | self.tasker.terminate() 44 | self.tasker = None 45 |
-------------------------------------------------------------------------------- /syncanysql/taskers/execute.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/15 3 | # create by: snower 4 | 5 | import os 6 | import time 7 | import json 8 | from syncany.logger import get_logger 9 | from ..parser import FileParser 10 | 11 | 12 | class ExecuteTasker(object): 13 | def __init__(self, config): 14 | self.config = config 15 | self.executor = None 16 | 17 | def start(self, name, executor, session_config, manager, arguments): 18 | start_time = time.time() 19 | try: 20 | if self.config["filename"].endswith(".sql") or self.config["filename"].endswith(".sqlx") or self.config["filename"].endswith(".prql"): 21 | if self.executor is None: 22 | from ..executor import Executor 23 | self.executor = Executor(manager, session_config.session(), executor) 24 | 25 | get_logger().info("execute file %s", self.config["filename"]) 26 | try: 27 | file_parser = FileParser(self.config["filename"]) 28 | sqls = file_parser.load() 29 | with self.executor as executor: 30 | executor.run(self.config["filename"], sqls) 31 | finally: 32 | session_config.merged_config = None 33 | elif self.config["filename"].endswith(".json"): 34 | if self.executor is None: 35 | from ..executor import Executor 36 | self.executor = Executor(manager, session_config.session(), executor) 37 | 38 | from .query import QueryTasker 39 | with open(self.config["filename"], 'r', encoding=os.environ.get("SYNCANYENCODING", "utf-8")) as fp: 40 | tasker = QueryTasker(json.loads(fp.read())) 41 | with self.executor as executor: 42 | arguments = {"@verbose": arguments.get("@verbose", False), "@timeout": arguments.get("@timeout", 0), 43 | "@limit": self.executor.env_variables.get("@limit", 0), "@batch": self.executor.env_variables.get("@batch", 0), 44 | "@streaming": arguments.get("@streaming", False), "@recovery": arguments.get("@recovery",
False), 45 | "@join_batch": arguments.get("@join_batch", 10000), "@insert_batch": arguments.get("@insert_batch", 0), 46 | "@use_input": self.executor.env_variables.get("@use_input", None), "@use_output": self.executor.env_variables.get("@use_output", None), 47 | "@use_output_type": self.executor.env_variables.get("@use_output_type", None), "@primary_order": False} 48 | name = "execute_" + "".join([c if c.isalpha() or c.isdigit() else '_' for c in self.config["filename"]]) 49 | executor.runners.extend(tasker.start(name, executor, executor.session_config, executor.manager, arguments)) 50 | finally: 51 | get_logger().info("execute %s start %.2fms", self.config["filename"], (time.time() - start_time) * 1000) 52 | return [self] 53 | 54 | def run(self, executor, session_config, manager): 55 | start_time = time.time() 56 | try: 57 | if not self.executor: 58 | try: 59 | os.system(self.config["filename"]) 60 | except Exception as e: 61 | get_logger().warning("execute system command error: %s", str(e)) 62 | return 63 | with self.executor as executor: 64 | return executor.execute() 65 | finally: 66 | get_logger().info("execute %s run %.2fms", self.config["filename"], (time.time() - start_time) * 1000) 67 | 68 | def terminate(self): 69 | if not self.executor: 70 | return 71 | self.executor.terminate() 72 | self.executor = None 73 | -------------------------------------------------------------------------------- /syncanysql/taskers/explain.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/14 3 | # create by: snower 4 | 5 | import copy 6 | import datetime 7 | from syncany.main import beautify_print 8 | 9 | 10 | class ExplainTasker(object): 11 | def __init__(self, tasker, sql=None): 12 | self.config = None 13 | self.tasker = tasker 14 | self.sql = sql 15 | 16 | def start(self, name, executor, session_config, manager, arguments): 17 | self.config = copy.deepcopy(self.tasker.config) 18 | if self.sql: 19 | beautify_print("%s tasker %s execute sql:\n%s" % (datetime.datetime.now(), self.config["name"], self.sql)) 20 | print() 21 | beautify_print("%s tasker %s compiled config:" % (datetime.datetime.now(), self.config["name"])) 22 | beautify_print(self.config) 23 | print() 24 | 25 | for key in list(arguments.keys()): 26 | if key.endswith("@verbose"): 27 | arguments[key] = True 28 | self.tasker.start(name, executor, session_config, manager, arguments) 29 | return [self] 30 | 31 | def run(self, executor, session_config, manager): 32 | try: 33 | return self.tasker.run(executor, session_config, manager) 34 | finally: 35 | self.config, self.tasker = None, None 36 | 37 | def terminate(self): 38 | if not self.tasker: 39 | return 40 | self.tasker.terminate() 41 | self.tasker = None 42 | -------------------------------------------------------------------------------- /syncanysql/taskers/into.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/3/17 3 | # create by: snower 4 | 5 | 6 | class IntoTasker(object): 7 | def __init__(self, tasker, config): 8 | self.tasker = tasker 9 | self.config = config 10 | 11 | def start(self, name, executor, session_config, manager, arguments): 12 | for variable in self.config["variables"]: 13 | if variable in executor.env_variables: 14 | continue 15 | executor.env_variables[variable] = None 16 | self.tasker.config["output"] = "&.--.__into_" + str(id(self)) + "::" + self.tasker.config["output"].split("::")[-1] 17 | self.tasker.config["name"] = 
self.tasker.config["name"] + "#into" 18 | self.tasker.start(name, executor, session_config, manager, arguments) 19 | return [self] 20 | 21 | def run(self, executor, session_config, manager): 22 | try: 23 | core_tasker = self.tasker.tasker 24 | if not core_tasker: 25 | return 1 26 | database = core_tasker.outputer.db 27 | name = core_tasker.outputer.name 28 | schema_keys = tuple(core_tasker.schema.keys()) if isinstance(core_tasker.schema, dict) else None 29 | self.tasker.run(executor, session_config, manager) 30 | 31 | query = database.query(name, ["id"]) 32 | datas = query.commit() 33 | delete = database.delete(name, ["id"]) 34 | delete.commit() 35 | 36 | if not schema_keys: 37 | value = datas[0] if len(datas) == 1 else datas 38 | if len(self.config["variables"]) == 1 and isinstance(value, dict) and len(value) == 1: 39 | value = list(value.values())[0] 40 | elif len(schema_keys) == 1: 41 | if len(datas) == 1: 42 | value = datas[0][schema_keys[0]] if schema_keys[0] in datas[0] else None 43 | else: 44 | value = [data[schema_keys[0]] for data in datas if schema_keys[0] in data] 45 | else: 46 | if len(datas) == 1: 47 | value = {key: datas[0].get(key) for key in schema_keys} 48 | else: 49 | value = [{key: data.get(key) for key in schema_keys} for data in datas] 50 | 51 | if len(self.config["variables"]) == 1: 52 | if value is None: 53 | if self.config["variables"][0] in executor.env_variables: 54 | executor.env_variables[self.config["variables"][0]] = None 55 | else: 56 | executor.env_variables[self.config["variables"][0]] = value 57 | else: 58 | for i in range(len(self.config["variables"])): 59 | if i >= len(schema_keys): 60 | if self.config["variables"][i] in executor.env_variables: 61 | executor.env_variables[self.config["variables"][i]] = None 62 | continue 63 | if isinstance(value, dict): 64 | executor.env_variables[self.config["variables"][i]] = value[schema_keys[i]] 65 | else: 66 | executor.env_variables[self.config["variables"][i]] = [v[schema_keys[i]] for v in value] 67 | finally: 68 | self.tasker = None 69 | 70 | def terminate(self): 71 | if not self.tasker: 72 | return 73 | self.tasker.terminate() 74 | self.tasker = None 75 | -------------------------------------------------------------------------------- /syncanysql/taskers/set.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/13 3 | # create by: snower 4 | 5 | class SetCommandTasker(object): 6 | def __init__(self, config): 7 | self.config = config 8 | 9 | def start(self, name, executor, session_config, manager, arguments): 10 | key, is_global = self.config["key"], False 11 | if key[:6].lower() == "global": 12 | key, is_global = key[6:].strip(), True 13 | name = key.split(".")[0][1:] 14 | if name in ("databases", "imports", "sources", "defines", "variables", "options", "caches"): 15 | self.set_config(session_config, key[1:], is_global) 16 | elif name == "virtual_views": 17 | self.set_config(session_config, "databases." 
+ key[1:], is_global) 18 | elif key[:7] == "@config": 19 | self.set_config(session_config, key[8:].strip(), is_global) 20 | else: 21 | try: 22 | self.set_env_variable(executor, key, self.parse_value(self.config["value"].strip()), is_global) 23 | except ValueError as e: 24 | if key[:1] == "@": 25 | try: 26 | executor.compile(name, "select " + self.config["value"].strip() + " into " + key) 27 | return [] 28 | except Exception as e: 29 | raise ValueError("unknown value: %s" % self.config["value"].strip()) 30 | raise e 31 | return [] 32 | 33 | def set_config(self, session_config, key, is_global=False): 34 | if is_global: 35 | session_config.global_config.set(key, self.parse_value(self.config["value"].strip())) 36 | session_config.merge() 37 | return 38 | session_config.set(key, self.parse_value(self.config["value"].strip())) 39 | 40 | def set_env_variable(self, executor, key, value, is_global=False): 41 | if is_global: 42 | executor.global_env_variables[key] = value 43 | return 44 | executor.env_variables[key] = value 45 | 46 | def parse_value(self, value): 47 | if not isinstance(value, str): 48 | return value 49 | if value[:1] in ('"', "'"): 50 | if value[:3] in ("'''", '"""'): 51 | return value[3:-3].strip() 52 | return value[1:-1].strip() 53 | if value.lower() == 'true': 54 | return True 55 | if value.lower() == 'false': 56 | return False 57 | if value.lower() == 'null': 58 | return None 59 | if value.isdigit() or (value[0] == "-" and value[1:].isdigit()): 60 | return int(value) 61 | value_info = value.split(".") 62 | if len(value_info) == 2 and (value_info[0].isdigit() or (value_info[0][0] == "-" and value_info[0][1:].isdigit())) \ 63 | and value_info[1].isdigit(): 64 | return float(value) 65 | raise ValueError("unknown value: %s" % value) 66 | 67 | def run(self, executor, session_config, manager): 68 | pass 69 | 70 | def terminate(self): 71 | pass
-------------------------------------------------------------------------------- /syncanysql/taskers/show.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/27 3 | # create by: snower 4 | 5 | import copy 6 | 7 | 8 | class ShowCommandTasker(object): 9 | def __init__(self, config): 10 | self.config = config 11 | 12 | def start(self, name, executor, session_config, manager, arguments): 13 | if self.config.get("key").lower() == "databases": 14 | self.show_databases(session_config) 15 | elif self.config.get("key").lower() == "imports": 16 | self.show_imports(session_config) 17 | return [] 18 | 19 | def run(self, executor, session_config, manager): 20 | pass 21 | 22 | def terminate(self): 23 | pass 24 | 25 | def show_databases(self, session_config): 26 | databases = copy.deepcopy(session_config.get().get("databases", [])) 27 | fields, datas = ["name", "driver", "params", "virtual_views"], [] 28 | for database in databases: 29 | virtual_views = database.pop("virtual_views", None) 30 | datas.append({ 31 | "name": database.pop("name"), 32 | "driver": database.pop("driver"), 33 | "params": database, 34 | "virtual_views": virtual_views if virtual_views else "", 35 | }) 36 | self.print(fields, datas) 37 | 38 | def show_imports(self, session_config): 39 | imports = copy.deepcopy(session_config.get().get("imports", {})) 40 | fields, datas = ["name", "alias"], [] 41 | for alias, name in imports.items(): 42 | datas.append({ 43 | "name": name, 44 | "alias": alias 45 | }) 46 | self.print(fields, datas) 47 | 48 | def print(self, fields, datas): 49 | try: 50 | import rich 51 | from rich.table import
Table 52 | except ImportError: 53 | print("\t".join(fields)) 54 | for data in datas: 55 | print("\t".join([str(data[field]) for field in fields])) 56 | return 57 | 58 | table = Table(show_header=True, collapse_padding=True, expand=True, highlight=True) 59 | for field in fields: 60 | table.add_column(field) 61 | for data in datas: 62 | table.add_row(*(str(data[field]) for field in fields)) 63 | rich.print(table)
-------------------------------------------------------------------------------- /syncanysql/taskers/use.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/25 3 | # create by: snower 4 | 5 | import os 6 | 7 | 8 | class UseCommandTasker(object): 9 | def __init__(self, config): 10 | self.config = config 11 | 12 | def start(self, name, executor, session_config, manager, arguments): 13 | use_info = [s.strip() for s in self.config["use"].split(" as ")] 14 | if not use_info[0] or isinstance(use_info[0], (bool, int, float, list, tuple, set, dict)): 15 | return [] 16 | if isinstance(use_info[0], str): 17 | __import__(use_info[0], globals(), locals(), [use_info[0].rpartition(".")[-1]]) 18 | if len(use_info) >= 2: 19 | if os.path.exists(use_info[0]): 20 | session_config.set("sources." + use_info[1], use_info[0]) 21 | else: 22 | session_config.set("imports." + use_info[1], use_info[0]) 23 | else: 24 | if os.path.exists(use_info[0]): 25 | session_config.set("sources." + use_info[0].split(".")[-1], use_info[0]) 26 | else: 27 | session_config.set("imports." + use_info[0].split(".")[-1], use_info[0]) 28 | return [] 29 | 30 | def run(self, executor, session_config, manager): 31 | pass 32 | 33 | def terminate(self): 34 | pass 35 |
-------------------------------------------------------------------------------- /syncanysql/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/19 3 | # create by: snower 4 | 5 | from decimal import Decimal 6 | import datetime 7 | import string 8 | 9 | NumberTypes = (int, float) 10 | NumberDecimalTypes = (int, float, Decimal) 11 | NumberStringTypes = (int, float, str) 12 | SequenceTypes = (tuple, list) 13 | 14 | 15 | def parse_value(value): 16 | value = value.strip() 17 | if not value: 18 | return "" 19 | if len(value) >= 2 and value[0] in ("'", '"') and value[-1] in ("'", '"'): 20 | return value[1:-1] 21 | if value.isdigit() or (value[0] == "-" and value[1:].isdigit()): 22 | return int(value) 23 | if value.lower() == "true": 24 | return True 25 | if value.lower() == "false": 26 | return False 27 | if value.lower() == "null": 28 | return None 29 | value_info = value.split(".") 30 | if len(value_info) == 2 and (value_info[0].isdigit() or (value_info[0][0] == "-" and value_info[0][1:].isdigit())) \ 31 | and value_info[1].isdigit(): 32 | return float(value) 33 | return value 34 | 35 | def parse_number(x, is_float=False): 36 | index, dot_index = -1, -1 37 | for i in range(len(x)): 38 | if x[i] in string.digits: 39 | index = i 40 | continue 41 | if index >= 0 and is_float and dot_index < 0 and x[i] == ".": 42 | dot_index = i 43 | continue 44 | break 45 | if index < 0: 46 | return float(x) if is_float else int(x) 47 | return float(x[:index + 1]) if dot_index > 0 else int(x[:index + 1]) 48 | 49 | def ensure_int(x): 50 | if isinstance(x, int): 51 | return x 52 | if x is None: 53 | raise ValueError('value is None') 54 | if not x: 55 | return 0 56 | if x is True: 57 | return 1 58 | if isinstance(x, datetime.date): 59 | if isinstance(x,
datetime.datetime): 60 | return int(x.strftime("%Y%m%d%H%M%S")) 61 | return int(x.strftime("%Y%m%d")) 62 | if isinstance(x, datetime.time): 63 | return int(x.strftime("%H%M%S")) 64 | if isinstance(x, str): 65 | try: 66 | return int(x) 67 | except ValueError: 68 | return parse_number(x, False) 69 | return int(x) 70 | 71 | def ensure_float(x): 72 | if isinstance(x, float): 73 | return x 74 | if x is None: 75 | raise ValueError('value is None') 76 | if not x: 77 | return 0 78 | if x is True: 79 | return 1 80 | if isinstance(x, datetime.date): 81 | if isinstance(x, datetime.datetime): 82 | return float(x.strftime("%Y%m%d%H%M%S")) + x.microsecond / 1000 83 | return float(x.strftime("%Y%m%d")) 84 | if isinstance(x, datetime.time): 85 | return float(x.strftime("%H%M%S")) + x.microsecond / 1000 86 | if isinstance(x, str): 87 | try: 88 | return float(x) 89 | except ValueError: 90 | return parse_number(x, True) 91 | return float(x) 92 | 93 | def ensure_number(x): 94 | if isinstance(x, NumberDecimalTypes): 95 | return x 96 | if x is None: 97 | raise ValueError('value is None') 98 | if not x: 99 | return 0 100 | if x is True: 101 | return 1 102 | if isinstance(x, str): 103 | if "." in x: 104 | try: 105 | return float(x) 106 | except ValueError: 107 | return ensure_float(x) 108 | try: 109 | return int(x) 110 | except ValueError: 111 | return ensure_int(x) 112 | if isinstance(x, datetime.date): 113 | if isinstance(x, datetime.datetime): 114 | return int(x.strftime("%Y%m%d%H%M%S")) 115 | return int(x.strftime("%Y%m%d")) 116 | if isinstance(x, datetime.time): 117 | return int(x.strftime("%H%M%S")) 118 | return ensure_int(x) 119 | 120 | def ensure_str(x): 121 | if isinstance(x, str): 122 | return x 123 | if x is None: 124 | raise ValueError('value is None') 125 | if not x: 126 | return '0' 127 | if x is True: 128 | return '1' 129 | if isinstance(x, bytes): 130 | return x.decode("utf-8") 131 | if isinstance(x, datetime.date): 132 | if isinstance(x, datetime.datetime): 133 | return x.strftime("%Y-%m-%d %H:%M:%S") 134 | return x.strftime("%Y-%m-%d") 135 | if isinstance(x, datetime.time): 136 | return x.strftime("%H:%M:%S") 137 | return str(x) 138 | -------------------------------------------------------------------------------- /syncanysql/version.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/2/27 3 | # create by: snower 4 | 5 | version = "0.1.24" 6 | version_info = (0, 1, 24) 7 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | -------------------------------------------------------------------------------- /tests/example.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | import sys 6 | import os 7 | from unittest import TestCase 8 | from syncanysql import ScriptEngine, Executor 9 | from syncany.database.memory import MemoryDBCollection, MemoryDBFactory 10 | from syncanysql.parser import FileParser 11 | 12 | 13 | class ExampleTestCase(TestCase): 14 | script_engine = None 15 | example_name = None 16 | execute_results = {} 17 | 18 | def get_executor(self): 19 | if self.script_engine is None: 20 | self.__class__.script_engine = ScriptEngine() 21 | if self.script_engine.executor is None: 22 | self.script_engine.setup() 23 | return Executor(self.script_engine.manager,
self.script_engine.executor.session_config.session(), 24 | self.script_engine.executor) 25 | 26 | 27 | def execute(self, filename): 28 | file_parser = FileParser(os.path.join("examples", self.example_name, filename)) 29 | sqls = file_parser.load() 30 | for sql in sqls: 31 | if sql.sql[:6].lower() != "select" or "into" in sql.sql:  # skip non-SELECT statements and SELECT ... INTO queries 32 | continue 33 | sql.sql = "INSERT INTO `__test__%s_%s` %s" % (self.__class__.__name__, sql.lineno, sql.sql)  # capture the SELECT's result set into a memory table keyed by class name and script line number 34 | 35 | cwd = os.getcwd() 36 | os.chdir(os.path.join("examples", self.example_name)) 37 | sys.path.insert(0, os.path.abspath(os.getcwd())) 38 | try: 39 | with self.get_executor() as executor: 40 | executor.run("execute[%s]" % self.__class__.__name__, sqls) 41 | executor.execute() 42 | 43 | self.execute_results = {} 44 | for config_key, factory in self.script_engine.manager.database_manager.factorys.items(): 45 | if not isinstance(factory, MemoryDBFactory): 46 | continue 47 | for driver in factory.drivers: 48 | if not isinstance(driver.instance, MemoryDBCollection): 49 | continue 50 | for key in list(driver.instance.keys()): 51 | if key.startswith("--.__test__"): 52 | self.execute_results[key[3:]] = driver.instance[key] 53 | driver.instance.remove(key) 54 | self.script_engine.manager.database_manager.states.clear() 55 | finally: 56 | sys.path.pop(0) 57 | os.chdir(cwd) 58 | 59 | def assert_data(self, lineno, checker, error_msg): 60 | execute_result = self.execute_results.get("__test__%s_%s" % (self.__class__.__name__, lineno)) 61 | assert execute_result is not None, error_msg 62 | if callable(checker): 63 | self.assertTrue(checker(execute_result), error_msg) 64 | else: 65 | self.assertEqual(execute_result, checker, error_msg) 66 | 67 | def assert_value(self, lineno, key, checker, error_msg, index=0): 68 | execute_result = self.execute_results.get("__test__%s_%s" % (self.__class__.__name__, lineno)) 69 | assert execute_result is not None, error_msg 70 | assert isinstance(execute_result, list) and len(execute_result) > index, error_msg 71 | assert key in execute_result[index], error_msg 72 | if callable(checker): 73 | self.assertTrue(checker(execute_result[index][key]), error_msg) 74 | else: 75 | self.assertEqual(execute_result[index][key], checker, error_msg) 76 | -------------------------------------------------------------------------------- /tests/test_example_datetime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | import datetime 6 | from .example import ExampleTestCase 7 | 8 | 9 | class DatetimeExampleTestCase(ExampleTestCase): 10 | example_name = "datetime" 11 | 12 | def test_datetime(self): 13 | self.execute("datetime.sql") 14 | 15 | self.assert_value(2, "NOW()", lambda value: isinstance(value, datetime.datetime), "data error") 16 | 17 | self.assert_value(4, "NOW(0)", lambda value: isinstance(value, datetime.datetime), "data error") 18 | self.assert_value(4, "NOW('-1d')", lambda value: isinstance(value, datetime.datetime), "data error") 19 | self.assert_value(4, "NOW('+3d')", lambda value: isinstance(value, datetime.datetime), "data error") 20 | self.assert_value(4, "NOW('-3d', 0)", lambda value: isinstance(value, datetime.datetime), "data error") 21 | self.assert_value(4, "NOW('-3d', 0, 10, 11)", lambda value: isinstance(value, datetime.datetime), "data error") 22 | 23 | self.assert_value(6, "DATE_ADD(NOW(), 1, '')", lambda value: isinstance(value, datetime.datetime), "data error") 24 | self.assert_value(6, "ADDDATE(NOW(), INTERVAL '20'
DAY)", lambda value: isinstance(value, datetime.datetime), 25 | "data error") 26 | self.assert_value(6, "DATE_SUB(NOW(), INTERVAL '10' DAY)", lambda value: isinstance(value, datetime.datetime), 27 | "data error") 28 | self.assert_value(6, "SUBDATE(NOW(), INTERVAL '7' MONTH)", lambda value: isinstance(value, datetime.datetime), 29 | "data error") 30 | 31 | self.assert_value(8, "ADDTIME(NOW(), '10:00')", lambda value: isinstance(value, datetime.datetime), 32 | "data error") 33 | self.assert_value(8, "SUBTIME(NOW(), '1 10:00')", lambda value: isinstance(value, datetime.datetime), 34 | "data error") 35 | 36 | self.assert_value(10, "DATE_FORMAT(DATETIME('2023-04-24 17:07:08'), '%Y-%m-%d %H:%M:%S')", 37 | '2023-04-24 17:07:08', "data error") 38 | self.assert_value(10, "TIME_FORMAT(DATETIME('2023-04-24 17:07:08'), '%H:%M:%S')", '17:07:08', "data error") 39 | self.assert_value(10, "TIME_TO_SEC('10:11:00')", 36660, "data error") 40 | self.assert_value(10, "SEC_TO_TIME(234)", '00:03:54', "data error") 41 | 42 | self.assert_value(12, "CURDATE()", lambda value: isinstance(value, datetime.date), "data error") 43 | self.assert_value(12, "CURRENT_DATE", lambda value: isinstance(value, datetime.date), "data error") 44 | self.assert_value(12, "CURRENT_TIME()", lambda value: isinstance(value, datetime.time), "data error") 45 | self.assert_value(12, "CURTIME()", lambda value: isinstance(value, datetime.time), "data error") 46 | 47 | self.assert_value(14, "FROM_UNIXTIME(1677833819)", lambda value: isinstance(value, datetime.datetime), 48 | "data error") 49 | self.assert_value(14, "UNIX_TIMESTAMP()", lambda value: isinstance(value, int), "data error") 50 | self.assert_value(14, "UNIX_TIMESTAMP(NOW())", lambda value: isinstance(value, int), "data error") 51 | self.assert_value(14, "CURRENT_TIMESTAMP()", lambda value: isinstance(value, datetime.datetime), "data error") 52 | 53 | self.assert_value(16, "UTC_DATE()", lambda value: isinstance(value, datetime.date), "data error") 54 | self.assert_value(16, "UTC_TIME()", lambda value: isinstance(value, datetime.time), "data error") 55 | self.assert_value(16, "UTC_TIMESTAMP()", lambda value: isinstance(value, datetime.datetime), "data error") 56 | 57 | self.assert_value(18, "DATE(NOW())", lambda value: isinstance(value, datetime.date), "data error") 58 | self.assert_value(18, "DATETIME(NOW())", lambda value: isinstance(value, datetime.datetime), "data error") 59 | self.assert_value(18, "TIME(NOW())", lambda value: isinstance(value, datetime.time), "data error") 60 | self.assert_value(18, "DATETIME(DATE(NOW()))", lambda value: isinstance(value, datetime.datetime), "data error") 61 | self.assert_value(18, "DATETIME(TIME(NOW()))", lambda value: isinstance(value, datetime.datetime), "data error") 62 | self.assert_value(18, "DATE(TIME(NOW()))", None, "data error") 63 | self.assert_value(18, "TIME(DATE(NOW()))", lambda value: isinstance(value, datetime.time), "data error") 64 | 65 | self.assert_value(20, "DATE_FORMAT(CONVERT_DATETIME('1600-01-01'), '%Y-%m-%d %H:%M:%S')", 66 | '1600-01-01 00:00:00', "data error") 67 | self.assert_value(20, "DATE_FORMAT(CONVERT_DATETIME('1900-01-01 10:12:23'), '%Y-%m-%d %H:%M:%S')", 68 | '1900-01-01 10:12:23', "data error") 69 | self.assert_value(20, "TIME_FORMAT(CONVERT_DATETIME('1800-01-01 10:12:23'), '%H:%M:%S')", '10:12:23', "data error") 70 | -------------------------------------------------------------------------------- /tests/test_example_demo.py: -------------------------------------------------------------------------------- 1 | # 
-*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class DemoExampleTestCase(ExampleTestCase): 9 | example_name = "demo" 10 | 11 | def test_demo(self): 12 | self.execute("demo.sql") 13 | 14 | self.assert_data(3, [{'site_id': 8, 'site_name': '黄豆网', 'site_amount': 17.04, 'timeout_at': '16:00:00', 15 | 'vip_timeout_at': '11:00:00'}, 16 | {'site_id': 15, 'site_name': '青菜网', 'site_amount': 7.2, 'timeout_at': '15:00:00', 17 | 'vip_timeout_at': '11:00:00'}, 18 | {'site_id': 21, 'site_name': '去啥网', 'site_amount': 0, 'timeout_at': '16:00:00', 19 | 'vip_timeout_at': '11:00:00'}, 20 | {'site_id': 26, 'site_name': '汽车网', 'site_amount': 0, 'timeout_at': '16:00:00', 21 | 'vip_timeout_at': '11:00:00'}, 22 | {'site_id': 28, 'site_name': '火箭网', 'site_amount': 0, 'timeout_at': '15:00:00', 23 | 'vip_timeout_at': '10:00:00'}, 24 | {'site_id': 34, 'site_name': '卫星网', 'site_amount': 11.2, 'timeout_at': '16:40:00', 25 | 'vip_timeout_at': '11:20:00'}], "data error") 26 | 27 | def test_demo2(self): 28 | self.execute("demo2.sql") 29 | 30 | self.assert_data(2, [{'site_id': 8, 'site_name': '黄豆网'}, {'site_id': 15, 'site_name': '青菜网'}, 31 | {'site_id': 21, 'site_name': '去啥网'}, {'site_id': 26, 'site_name': '汽车网'}, 32 | {'site_id': 8, 'site_name': '黄豆网'}, {'site_id': 21, 'site_name': '去啥网'}, 33 | {'site_id': 26, 'site_name': '汽车网'}, {'site_id': 15, 'site_name': '青菜网'}, 34 | {'site_id': 28, 'site_name': '火箭网'}, {'site_id': 15, 'site_name': '青菜网'}, 35 | {'site_id': 28, 'site_name': '火箭网'}, {'site_id': 34, 'site_name': '卫星网'}, 36 | {'site_id': 34, 'site_name': '卫星网'}], "data error") 37 | 38 | self.assert_data(5, [{'order_id': 6, 'site_id': 34, 'amount': 11.2, 'status': 0}, 39 | {'order_id': 1, 'site_id': 8, 'amount': 10, 'status': 0}], "data error") 40 | 41 | self.assert_data(7, [{'order_id': 6, 'site_id': 34, 'amount': 1120.0}, 42 | {'order_id': 4, 'site_id': 28, 'amount': 470.0}], "data error") 43 | 44 | self.assert_data(9, [{'order_id': 6, 'site_id': 34, 'amount': 1120.0}, 45 | {'order_id': 1, 'site_id': 8, 'amount': 1000}], "data error") 46 | 47 | self.assert_data(15, [{'site_id': 8, 'site_name': '黄豆网', 'site_amount': 17.04, 'timeout_at': '16:00:00', 48 | 'vip_timeout_at': '11:00:00'}, 49 | {'site_id': 15, 'site_name': '青菜网', 'site_amount': 7.2, 'timeout_at': '15:00:00', 50 | 'vip_timeout_at': '11:00:00'}, 51 | {'site_id': 21, 'site_name': '去啥网', 'site_amount': 0, 'timeout_at': '16:00:00', 52 | 'vip_timeout_at': '11:00:00'}, 53 | {'site_id': 26, 'site_name': '汽车网', 'site_amount': 0, 'timeout_at': '16:00:00', 54 | 'vip_timeout_at': '11:00:00'}, 55 | {'site_id': 28, 'site_name': '火箭网', 'site_amount': 0, 'timeout_at': '15:00:00', 56 | 'vip_timeout_at': '10:00:00'}, 57 | {'site_id': 34, 'site_name': '卫星网', 'site_amount': 11.2, 'timeout_at': '16:40:00', 58 | 'vip_timeout_at': '11:20:00'}], "data error") 59 | 60 | self.assert_data(17, [{'site_id': 8, 'site_name': '黄豆网', 'site_amount': 17.04, 'timeout_at': '16:00:00', 61 | 'vip_timeout_at': '11:00:00'}, 62 | {'site_id': 15, 'site_name': '青菜网', 'site_amount': 7.2, 'timeout_at': '15:00:00', 63 | 'vip_timeout_at': '10:00:00'}, 64 | {'site_id': 21, 'site_name': '去啥网', 'site_amount': 0, 'timeout_at': '16:00:00', 65 | 'vip_timeout_at': '11:00:00'}, 66 | {'site_id': 26, 'site_name': '汽车网', 'site_amount': 0, 'timeout_at': '16:00:00', 67 | 'vip_timeout_at': '11:00:00'}, 68 | {'site_id': 28, 'site_name': '火箭网', 'site_amount': 0, 'timeout_at': '15:00:00', 69 | 'vip_timeout_at': '10:00:00'}, 70 | {'site_id': 34, 
'site_name': '卫星网', 'site_amount': 11.2, 'timeout_at': '16:40:00', 71 | 'vip_timeout_at': '11:20:00'}], "data error") 72 | -------------------------------------------------------------------------------- /tests/test_example_functions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | import uuid 6 | from bson.objectid import ObjectId 7 | from .example import ExampleTestCase 8 | 9 | 10 | class FunctionsExampleTestCase(ExampleTestCase): 11 | example_name = "functions" 12 | 13 | def test_generate(self): 14 | self.execute("generate.sql") 15 | 16 | self.assert_value(2, "a", lambda value: isinstance(value, ObjectId), "data error") 17 | self.assert_value(2, "b", ObjectId('65bb4211eda4fed2e199073e'), "data error") 18 | self.assert_value(2, "c", lambda value: isinstance(value, uuid.UUID), "data error") 19 | self.assert_value(2, "d", uuid.UUID('54aa0a5c-b54f-4628-8391-3756007d5fc3'), "data error") 20 | self.assert_value(2, "e", lambda value: isinstance(value, int), "data error") 21 | self.assert_value(2, "f", lambda value: isinstance(value, int), "data error") 22 | 23 | self.assert_value(4, "a", lambda value: isinstance(value, float), "data error") 24 | self.assert_value(4, "b", lambda value: isinstance(value, int), "data error") 25 | self.assert_value(4, "c", lambda value: isinstance(value, str), "data error") 26 | self.assert_value(4, "d", lambda value: isinstance(value, str), "data error") 27 | self.assert_value(4, "e", lambda value: isinstance(value, str), "data error") 28 | self.assert_value(4, "f", lambda value: isinstance(value, str), "data error") 29 | self.assert_value(4, "g", lambda value: isinstance(value, str), "data error") 30 | self.assert_value(4, "h", lambda value: isinstance(value, bytes), "data error") 31 | 32 | self.assert_value(6, "a", lambda value: isinstance(value, int), "data error") 33 | -------------------------------------------------------------------------------- /tests/test_example_get_value.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class GetValueExampleTestCase(ExampleTestCase): 9 | example_name = "get_value" 10 | 11 | def test_get_value(self): 12 | self.execute("get_value.sql") 13 | 14 | self.assert_data(4, [ 15 | {'orderId': 3243243, 'username': 'snower', 'payId': '23232323232323', 'refundId': ['111111']}], 16 | "data error") 17 | self.assert_data(6, [ 18 | {'orderId': 3243243, 'username': 'snower', 'payId': '23232323232323', 'refundId': ['111111']}], 19 | "data error") 20 | self.assert_data(8, [{'mobile1': '12345678911', 'mobile2': '12345678911'}], "data error") 21 | self.assert_data(10, [{'payTypes1': 1, 'payTypes2': 1, 'payTypes3': 1}], "data error") 22 | self.assert_data(12, [{'payTypes1': [1, 3], 'payTypes2': [1, 3], 'payTypes3': [1, 3]}], "data error") 23 | self.assert_data(14, [{'payTypes1': [4, 3], 'payTypes2': [4, 3], 'payTypes3': [4, 3]}], "data error") 24 | self.assert_data(16, [{'channel1': 'weixin', 'channel2': 'weixin'}], "data error") 25 | -------------------------------------------------------------------------------- /tests/test_example_import_python.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | import sys 6 | import os 7 | import datetime 8 | from .example import 
ExampleTestCase 9 | 10 | 11 | class ImportPythonExampleTestCase(ExampleTestCase): 12 | example_name = "import_python" 13 | 14 | def test_import_python(self): 15 | self.execute("import_python.sql") 16 | 17 | self.assert_value(8, "UTILS$HELLO()", 'hello world!', "data error") 18 | 19 | self.assert_value(10, "UTILS$ADD_NUMBER(1, 2)", 3, "data error") 20 | self.assert_value(10, "UTILS$SUM_ARRAY((1, 2, 3))", 6, "data error") 21 | 22 | self.assert_value(12, "PARSING$PARSE('2023-02-10 10:33:22')", 23 | lambda value: isinstance(value, datetime.datetime), "data error") 24 | 25 | self.assert_value(14, "SYS$VERSION()", sys.version, "data error") 26 | self.assert_value(14, "OS$GETCWD()", lambda value: os.getcwd() in value, "data error") 27 | 28 | self.assert_value(16, "PYTHON_DATETIME$DATETIME$NOW()", 29 | lambda value: isinstance(value, datetime.datetime), "data error") 30 | -------------------------------------------------------------------------------- /tests/test_example_insert_types.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class InsertTypesExampleTestCase(ExampleTestCase): 9 | example_name = "insert_types" 10 | 11 | def test_delete_insert(self): 12 | self.execute("delete_insert.sql") 13 | 14 | self.assert_data(10, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 15 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 16 | {'id': 3, 'name': '油麦菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 17 | 18 | def test_insert(self): 19 | self.execute("insert.sql") 20 | 21 | self.assert_data(10, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 22 | {'id': 2, 'name': '土豆', 'create_time': '2023-03-12 10:12:34'}, 23 | {'id': 4, 'name': '花菜', 'create_time': '2023-03-12 10:12:34'}, 24 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 25 | {'id': 3, 'name': '油麦菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 26 | 27 | def test_update_delete_insert(self): 28 | self.execute("update_delete_insert.sql") 29 | 30 | self.assert_data(10, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 31 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 32 | {'id': 3, 'name': '油麦菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 33 | 34 | self.assert_data(21, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 35 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 36 | {'id': 3, 'name': '油麦菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 37 | 38 | def test_update_insert(self): 39 | self.execute("update_insert.sql") 40 | 41 | self.assert_data(10, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 42 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 43 | {'id': 4, 'name': '花菜', 'create_time': '2023-03-12 10:12:34'}, 44 | {'id': 3, 'name': '油麦菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 45 | 46 | def test_update(self): 47 | self.execute("update.sql") 48 | 49 | self.assert_data(10, [{'id': 1, 'name': '萝卜', 'create_time': '2023-03-12 10:12:34'}, 50 | {'id': 2, 'name': '青菜', 'create_time': '2023-03-12 10:12:34'}, 51 | {'id': 4, 'name': '花菜', 'create_time': '2023-03-12 10:12:34'}], "data error") 52 | -------------------------------------------------------------------------------- /tests/test_example_json.py: -------------------------------------------------------------------------------- 1 | # 
-*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class JsonExampleTestCase(ExampleTestCase): 9 | example_name = "json" 10 | 11 | def test_json(self): 12 | self.execute("json.sql") 13 | 14 | self.assert_data(4, [{"JSON_CONTAINS(@j, @j2, '$.a')": 0}], "data error") 15 | 16 | self.assert_data(5, [{"JSON_CONTAINS(@j, @j2, '$.b')": 0}], "data error") 17 | 18 | self.assert_data(7, [{"JSON_CONTAINS(@j, @j2, '$.a')": 0}], "data error") 19 | 20 | self.assert_data(8, [{"JSON_CONTAINS(@j, @j2, '$.c')": 1}], "data error") 21 | 22 | self.assert_data(11, [{"JSON_CONTAINS_PATH(@j, 'one', '$.a', '$.e')": 1}], "data error") 23 | 24 | self.assert_data(12, [{"JSON_CONTAINS_PATH(@j, 'all', '$.a', '$.e')": 0}], "data error") 25 | 26 | self.assert_data(13, [{"JSON_CONTAINS_PATH(@j, 'one', '$.c.d')": 1}], "data error") 27 | 28 | self.assert_data(14, [{"JSON_CONTAINS_PATH(@j, 'one', '$.a.d')": 0}], "data error") 29 | 30 | self.assert_data(16, [{"JSON_EXTRACT('[10, 20, [30, 40]]', '$[1]')": 20}], "data error") 31 | 32 | self.assert_data(17, [{"JSON_EXTRACT('[10, 20, [30, 40]]', '$[2][*]')": [30, 40]}], "data error") 33 | 34 | self.assert_data(18, [{'JSON_EXTRACT(\'[10, 20, [{"a":30}, {"b":40}]]\', \'$[2][*]["a"]\')': [30]}], 35 | "data error") 36 | 37 | self.assert_data(20, [{"JSON_DEPTH('{}')": 1, "JSON_DEPTH('[]')": 1, "JSON_DEPTH('true')": 1}], "data error") 38 | 39 | self.assert_data(21, [{"JSON_DEPTH('[10, 20]')": 2, "JSON_DEPTH('[[], {}]')": 2}], "data error") 40 | 41 | self.assert_data(22, [{'JSON_DEPTH(\'[10, {"a": 20}]\')': 3}], "data error") 42 | 43 | self.assert_data(24, [{'JSON_KEYS(\'{"a": 1, "b": {"c": 30}}\')': ['a', 'b']}], "data error") 44 | 45 | self.assert_data(25, [{'JSON_KEYS(\'{"a": 1, "b": {"c": 30}}\', \'$.b\')': ['c']}], "data error") 46 | 47 | self.assert_data(27, [{'JSON_LENGTH(\'[1, 2, {"a": 3}]\')': 3}], "data error") 48 | 49 | self.assert_data(28, [{'JSON_LENGTH(\'{"a": 1, "b": {"c": 30}}\')': 2}], "data error") 50 | 51 | self.assert_data(29, [{'JSON_LENGTH(\'{"a": 1, "b": {"c": 30}}\', \'$.b\')': 1}], "data error") 52 | 53 | self.assert_data(31, [{'JSON_VALID(\'{"a": 1}\')': 1}], "data error") 54 | 55 | self.assert_data(32, [{"JSON_VALID('hello')": 0, 'JSON_VALID(\'"hello"\')': 1}], "data error") 56 | 57 | self.assert_data(35, [{"JSON_SET(@j, '$.a', 2)": {'a': 2, 'b': 2, 'c': {'d': 4}}, 58 | "JSON_SET(@j, '$.c.d', 2)": {'a': 1, 'b': 2, 'c': {'d': 2}}}], "data error") 59 | 60 | self.assert_data(36, [ 61 | {'JSON_SET(\'"1"\', \'$[0]\', \'a\')': 'a', 'JSON_SET(\'"1"\', \'$[2]\', \'a\')': ['1', 'a']}], 62 | "data error") 63 | 64 | self.assert_data(37, [ 65 | {'JSON_SET(\'["1"]\', \'$[0]\', \'a\')': ['a'], 'JSON_SET(\'["1"]\', \'$[2]\', \'a\')': ['1', 'a']}], 66 | "data error") 67 | 68 | self.assert_data(39, [{"JSON_REMOVE(@j, '$.a', '$.c.d')": {'b': 2, 'c': {}}, 69 | "JSON_REMOVE(@j, '$.c.a')": {'a': 1, 'b': 2, 'c': {'d': 4}}}], "data error") 70 | 71 | self.assert_data(40, [{'JSON_REMOVE(\'"1"\', \'$[0]\')': '1', 'JSON_REMOVE(\'"1"\', \'$[2]\')': '1'}], 72 | "data error") 73 | 74 | self.assert_data(41, [{'JSON_REMOVE(\'["1"]\', \'$[0]\')': [], 'JSON_REMOVE(\'["1"]\', \'$[2]\')': ['1']}], 75 | "data error") 76 | -------------------------------------------------------------------------------- /tests/test_example_logic_operation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/9/27 3 | # create by: snower 4 | 5 | from .example import 
ExampleTestCase 6 | 7 | 8 | class LogicOperationExampleTestCase(ExampleTestCase): 9 | example_name = "logic_operation" 10 | 11 | def test_logic_operation(self): 12 | self.execute("logic_operation.sql") 13 | 14 | self.assert_data(2, [{'a': 1, 'b': 1, 'c': 0, 'd': 1, 'f': 1}], "data error") 15 | 16 | self.assert_data(4, [{'a': 1, 'b': 0}], "data error") 17 | 18 | self.assert_data(6, [{'a': 1, 'b': 1, 'c': 0, 'd': 1}], "data error") 19 | 20 | self.assert_data(8, [{'a': 0, 'b': 0, 'c': 1, 'd': 0, 'f': 0}], "data error") 21 | 22 | self.assert_data(10, [{'a': 0, 'b': 1, 'c': 0, 'd': 0}], "data error") 23 | 24 | self.assert_data(12, [{'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1, 'f': 0}], "data error") 25 | 26 | self.assert_data(14, [{'a': 0, 'b': 0, 'c': 0, 'd': 0, 'e': 0, 'f': 1}], "data error") 27 | 28 | self.assert_data(16, [{'a': 1, 'b': 0, 'c': 1, 'd': 0}], "data error") 29 | 30 | self.assert_data(18, [{'a': 0, 'b': 1, 'c': 0, 'd': 1}], "data error") 31 | 32 | self.assert_data(20, [{'a': 1, 'b': 'B'}], "data error") 33 | 34 | self.assert_data(22, [{'a': 36.2, 'b': 1, 'c': 'C'}], "data error") 35 | 36 | self.assert_data(24, [{'order_id': 2, 'uid': 1, 'goods_id': 1, 'amount': 0.6}], "data error") 37 | 38 | self.assert_data(26, [{'order_id': 2, 'uid': 1, 'goods_id': 1, 'amount': 0.6, 'status': 0}], "data error") 39 | 40 | self.assert_data(28, [{'a': 0, 'b': 1, 'c': 4, 'd': 'a X c', 'e': 'abc'}], "data error") 41 | 42 | self.assert_data(30, [{'a': 1, 'b': 0, 'c': 12, 'd': 'abc X ghi', 'e': 'def'}], "data error") -------------------------------------------------------------------------------- /tests/test_example_loop.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/24 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class LoopExampleTestCase(ExampleTestCase): 9 | example_name = "loop" 10 | 11 | def test_loop(self): 12 | self.execute("loop.sql") 13 | 14 | self.assert_data(15, [{'n': 1}, {'n': 2}, {'n': 3}, {'n': 4}], "data error") -------------------------------------------------------------------------------- /tests/test_example_mathematical.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/24 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class MathematicalExampleTestCase(ExampleTestCase): 9 | example_name = "mathematical" 10 | 11 | def test_mathematical(self): 12 | self.execute("mathematical.sql") 13 | 14 | self.assert_data(2, [{"a": 3, "b": 1, "c": 2, "d": 2}], "data error") 15 | self.assert_data(4, [{"a": 0, "b": 3, "c": 3, "d": -3}], "data error") 16 | self.assert_data(6, [{"a": 3, "b": 2022, "c": 222}], "data error") 17 | self.assert_data(8, [{"a": None, "b": 2, "c": 1}], "data error") 18 | self.assert_data(10, [{"a": 10115201050403, "b": 10115201, "c": 50403}], "data error") 19 | self.assert_data(12, [{"a": [2, 3, 4], "b": [4, 8, 12]}], "data error") 20 | self.assert_data(14, [{"a": 5, "b": 3}], "data error") 21 | -------------------------------------------------------------------------------- /tests/test_example_memory_temporary_storage.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class MemoryTemporaryStorageExampleTestCase(ExampleTestCase): 9 | example_name = "memory_temporary_storage" 10 | 11 | def 
test_memory_temporary_storage(self): 12 | self.execute("memory_temporary_storage.sql") 13 | 14 | self.assert_data(6, [ 15 | {'Id': 'aa7e941b-d399-4bec-0ba4-08d8dd2f9239', 'Email': 'bm6U11zDIspdNW1iQiVZdHX8uqOWZe0cers9BZEcCrE=', 16 | 'FirstName': 'John', 'LastName': 'Doe'}], "data error") 17 | self.assert_data(8, [{'Id': 'aa7e941b-d399-4bec-0ba4-08d8dd2f9239', 'Name': 'LiMei'}], "data error") 18 | self.assert_data(10, [ 19 | {'Id': 'aa7e941b-d399-4bec-0ba4-08d8dd2f9239', 'Email': 'bm6U11zDIspdNW1iQiVZdHX8uqOWZe0cers9BZEcCrE=', 20 | 'Name': 'LiMei', 'FirstName': 'John', 'LastName': 'Doe'}], "data error") 21 | -------------------------------------------------------------------------------- /tests/test_example_nginx_log.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class NginxLogExampleTestCase(ExampleTestCase): 9 | example_name = "nginx-log" 10 | 11 | def test_ip_top3(self): 12 | self.execute("ip-top-3.sql") 13 | 14 | self.assert_data(3, [{'cnt': 22, 'ip': '54.37.79.75'}, {'cnt': 14, 'ip': '143.110.222.166'}, 15 | {'cnt': 9, 'ip': '35.216.169.119'}], "data error") 16 | -------------------------------------------------------------------------------- /tests/test_example_parameter_variable.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/25 3 | # create by: snower 4 | 5 | import os 6 | from .example import ExampleTestCase 7 | 8 | 9 | class ParameterVariableExampleTestCase(ExampleTestCase): 10 | example_name = "parameter_variable" 11 | 12 | def test_parameter_variable(self): 13 | self.execute("parameter_variable.sql") 14 | 15 | self.assert_data(8, [{'a': 1, 'b': 2, 'c': os.environ.get("PATH", "")}], "data error") 16 | 17 | self.assert_data(9, [{'a': 3, 'b': 4.0, 'c': len(os.environ.get("PATH", "").encode("utf-8"))}], "data error") 18 | 19 | self.assert_data(22, [ 20 | {'a': 2, 'b': [1, 3], 'c': {'a': 1, 'b': 'abc'}, 'd': [{'a': 1, 'b': 'abc'}, {'a': 3, 'b': 'efg'}]}], 21 | "data error") 22 | 23 | def test_parameter_assign(self): 24 | self.execute("parameter_assign.sql") 25 | 26 | self.assert_data(4, [{'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 4}, {'a': 4, 'b': 5, 'c': 6, 'd': 7, 'e': 7}, 27 | {'a': 7, 'b': 8, 'c': 9, 'd': 10, 'e': 10}, {'a': 10, 'b': 11, 'c': 12, 'd': 13, 'e': 13}, 28 | {'a': 13, 'b': 14, 'c': 15, 'd': 16, 'e': 16}], "data error") 29 | 30 | self.assert_data(6, [{'a': 1, 'b': None, 'c': 1, 'd': 17}, {'a': 2, 'b': 1, 'c': 2, 'd': 18}, 31 | {'a': 3, 'b': 2, 'c': 3, 'd': 19}, {'a': 4, 'b': 3, 'c': 4, 'd': 20}, 32 | {'a': 5, 'b': 4, 'c': 5, 'd': 21}], "data error") 33 | 34 | self.assert_data(8, [{'a': 1, 'b': None, 'c': 1, 'd': 22}, {'a': 2, 'b': 1, 'c': 2, 'd': 23}, 35 | {'a': 3, 'b': 2, 'c': 3, 'd': 24}, {'a': 4, 'b': 3, 'c': 4, 'd': 25}, 36 | {'a': 5, 'b': 4, 'c': 5, 'd': 26}], "data error") 37 | 38 | self.assert_data(14, [ 39 | {'uid': 1, 'cnt': 2, 'amount': 55, 'start_at': '2023-01-12 10:11:12', 'end_at': '2023-07-12 22:11:12'}, 40 | {'uid': 1, 'cnt': 1, 'amount': 322, 'start_at': '2023-07-23 12:11:12', 'end_at': '2023-10-12 15:11:12'}, 41 | {'uid': 2, 'cnt': 2, 'amount': 3450, 'start_at': '2023-03-05 00:11:12', 'end_at': '2023-08-12 15:11:12'}], 42 | "data error") -------------------------------------------------------------------------------- /tests/test_example_pyeval.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2025/4/3 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class PyEvalExampleTestCase(ExampleTestCase): 9 | example_name = "pyeval" 10 | 11 | def test_pyeval(self): 12 | self.execute("pyeval.sql") 13 | 14 | self.assert_data(2, [{'a': 3, 'b': '0,1,2,3', 'c': 3, 'd': '123'}], 15 | "data error") -------------------------------------------------------------------------------- /tests/test_example_strings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/24 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class StringsExampleTestCase(ExampleTestCase): 9 | example_name = "strings" 10 | 11 | def test_strings(self): 12 | self.execute("strings.sql") 13 | 14 | self.assert_data(2, [{"a": "abc", "b": "ab", "c": "abc", "d": "ABC"}], "data error") 15 | self.assert_data(4, [{"a": "a b c", "b": "aaa", "c": "cba", "d": -1}], "data error") 16 | self.assert_data(6, [{"a": 1, "b": 1, "c": 1}], "data error") 17 | self.assert_data(8, [{"a": None, "b": 'a 2023-04-02 10:08:06 2023-04-02 10:08:06 1 0 1 1.23'}], "data error") 18 | -------------------------------------------------------------------------------- /tests/test_example_subquery.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2025/3/25 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class SubqueryExampleTestCase(ExampleTestCase): 9 | example_name = "subquery" 10 | 11 | def test_subquery(self): 12 | self.execute("subquery.sql") 13 | 14 | self.assert_data(1, [{'cnt': 4, 'latest_order_id': 6, 'total_amount': 27.6, 'uid': 2}], "data error") 15 | self.assert_data(5, [{'amount': 3, 16 | 'history_count': 5, 17 | 'history_exists': 1, 18 | 'order_id': 3, 19 | 'uid': 2}], "data error") 20 | self.assert_data(9, [{'goods_id': 1, 'goods_name': '青菜', 'order_count': 4, 'order_exists': 1}, 21 | {'goods_id': 2, 'goods_name': '白菜', 'order_count': 2, 'order_exists': 1}, 22 | {'goods_id': 3, 'goods_name': '萝卜', 'order_count': None, 'order_exists': 0}], 23 | "data error") 24 | self.assert_data(13, [{'amount': 3, 25 | 'history_count': 5, 26 | 'history_exists': 1, 27 | 'order_id': 3, 28 | 'uid': 2}], "data error") 29 | self.assert_data(17, [{'goods_id': 1, 'goods_name': '青菜', 'order_count': 4, 'order_exists': 1}, 30 | {'goods_id': 2, 'goods_name': '白菜', 'order_count': 2, 'order_exists': 1}, 31 | {'goods_id': 3, 'goods_name': '萝卜', 'order_count': None, 'order_exists': 0}], 32 | "data error") 33 | self.assert_data(21, [{'cnt': 1, 'order_id': 1, 'total_amount': 9.6}, 34 | {'cnt': 1, 'order_id': 2, 'total_amount': 7.6}, 35 | {'cnt': 1, 'order_id': 3, 'total_amount': 3}], 36 | "data error") 37 | self.assert_data(23, [{'total_amount': 15.6, 'uid': 1}, {'total_amount': 27.6, 'uid': 2}], 38 | "data error") 39 | self.assert_data(28, [{'amount': 3, 40 | 'has_history': 1, 41 | 'history_exists': 1, 42 | 'order_id': 3, 43 | 'uid': 2}], "data error") 44 | self.assert_data(36, [{'amount': 3, 45 | 'has_history': 1, 46 | 'history_exists': 1, 47 | 'order_id': 3, 48 | 'uid': 2}], "data error") 49 | self.assert_data(44, [{'amount': 3, 50 | 'has_history': 1, 51 | 'history_exists': 1, 52 | 'order_id': 3, 53 | 'uid': 2}], "data error") 54 | -------------------------------------------------------------------------------- 
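A minimal sketch (not a file in this repository) of how these example suites plug into the ExampleTestCase harness in tests/example.py: execute() rewrites each bare top-level SELECT in the script into INSERT INTO `__test__<ClassName>_<lineno>`, a per-line memory table, so assert_data and assert_value look results up by the line number of the SELECT in the .sql file. The example name, script, and expected rows below are hypothetical.

from .example import ExampleTestCase


class MyExampleTestCase(ExampleTestCase):
    example_name = "my_example"  # hypothetical examples/my_example directory

    def test_my_example(self):
        # Each bare top-level SELECT in the script is captured by execute()
        # into a `__test__MyExampleTestCase_<lineno>` memory table.
        self.execute("my_example.sql")
        # Compare the full result set of the SELECT on script line 2...
        self.assert_data(2, [{"a": 1}], "data error")
        # ...or check one column of the first result row with a predicate.
        self.assert_value(2, "a", lambda value: isinstance(value, int), "data error")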
/tests/test_example_type_annotation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/21 3 | # create by: snower 4 | 5 | import datetime 6 | import uuid 7 | from bson.objectid import ObjectId 8 | from .example import ExampleTestCase 9 | 10 | 11 | class TypeAnnotationExampleTestCase(ExampleTestCase): 12 | example_name = "type_annotation" 13 | 14 | def test_type_annotation(self): 15 | self.execute("type_annotation.sql") 16 | 17 | self.assert_data(5, [{'t': 'str', 'v': '3243243'}], "data error") 18 | self.assert_data(7, [{'t': 'str', 'v': '12.90'}], "data error") 19 | self.assert_value(10, 't', 'datetime', "data error") 20 | self.assert_value(10, 'v', lambda value: isinstance(value, datetime.datetime), "data error") 21 | self.assert_value(12, 'orderAt', lambda value: isinstance(value, datetime.datetime), "data error") 22 | self.assert_value(12, 't', 'str', "data error") 23 | self.assert_value(12, 'v', '2023-02-23', "data error") 24 | self.assert_value(16, 'orderAt', lambda value: isinstance(value, datetime.datetime), "data error") 25 | self.assert_value(16, 't', 'int', "data error") 26 | self.assert_value(16, 'v', lambda value: isinstance(value, int), "data error") 27 | self.assert_data(21, [{'t': 'objectid', 'v': ObjectId('6422a08fb4055348da72633b')}, 28 | {'t': 'objectid', 'v': ObjectId('6422a0a4b4055348da72633c')}], "data error") 29 | self.assert_data(26, [{'t': 'uuid', 'v': uuid.UUID('da984ae6-cd3f-11ed-af1b-eb91b1b4fa12')}], "data error") 30 | self.assert_data(28, [{'payId': uuid.UUID('da984ae6-cd3f-11ed-af1b-eb91b1b4fa12'), 't': 'int', 31 | 'v': 290562451387917676601713545966572075538}], "data error") 32 | self.assert_data(33, [{'t': 'bool', 'v': True}], "data error") 33 | self.assert_data(34, [{'v1': False, 'v2': True, 'v3': False, 'v4': True, 'v5': False}], "data error") 34 | 35 | def test_type_declaration_cast(self): 36 | self.execute("type_declaration_cast.sql") 37 | 38 | self.assert_data(5, [{'t': 'str', 'v': '3243243'}], "data error") 39 | self.assert_data(7, [{'t': 'str', 'v': '12.90'}], "data error") 40 | self.assert_value(10, 't', 'datetime', "data error") 41 | self.assert_value(10, 'v', lambda value: isinstance(value, datetime.datetime), "data error") 42 | self.assert_value(12, 'orderAt', lambda value: isinstance(value, datetime.datetime), "data error") 43 | self.assert_value(12, 't', 'str', "data error") 44 | self.assert_value(12, 'v', '2023-02-23', "data error") 45 | self.assert_value(16, 'orderAt', lambda value: isinstance(value, datetime.datetime), "data error") 46 | self.assert_value(16, 't', 'int', "data error") 47 | self.assert_value(16, 'v', lambda value: isinstance(value, int), "data error") 48 | self.assert_data(21, [{'t': 'objectid', 'v': ObjectId('6422a08fb4055348da72633b')}, 49 | {'t': 'objectid', 'v': ObjectId('6422a0a4b4055348da72633c')}], "data error") 50 | self.assert_data(26, [{'t': 'uuid', 'v': uuid.UUID('da984ae6-cd3f-11ed-af1b-eb91b1b4fa12')}], "data error") 51 | self.assert_data(28, [{'payId': uuid.UUID('da984ae6-cd3f-11ed-af1b-eb91b1b4fa12'), 't': 'int', 52 | 'v': 290562451387917676601713545966572075538}], "data error") 53 | self.assert_data(33, [{'t': 'bool', 'v': True}], "data error") 54 | self.assert_data(34, [{'v1': False, 'v2': True, 'v3': False, 'v4': True, 'v5': False}], "data error") 55 | -------------------------------------------------------------------------------- /tests/test_example_yield_data.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/25 3 | # create by: snower 4 | 5 | from .example import ExampleTestCase 6 | 7 | 8 | class YieldDataExampleTestCase(ExampleTestCase): 9 | example_name = "yield_data" 10 | 11 | def test_yield_data(self): 12 | self.execute("yield_data.sql") 13 | 14 | self.assert_data(5, [{'a': 'a', 'b': 1}, {'a': 'b', 'b': 2}, {'a': 'c', 'b': 3}], "data error") 15 | 16 | self.assert_data(11, [{'a': '青菜', 'b': '青菜'}, {'a': '白菜', 'b': '白菜'}, {'a': '青菜', 'b': '青菜'}], "data error") 17 | 18 | self.assert_data(13, [{'a': 1}, {'a': 2}, {'a': 3}], "data error") 19 | 20 | self.assert_data(17, [{'RANGE_COUNT(3)': 0, 'GENERATE_CUSTOMIZE$RANGE_COUNT(4)': 0}, 21 | {'RANGE_COUNT(3)': 1, 'GENERATE_CUSTOMIZE$RANGE_COUNT(4)': 1}, 22 | {'RANGE_COUNT(3)': 2, 'GENERATE_CUSTOMIZE$RANGE_COUNT(4)': 2}, 23 | {'RANGE_COUNT(3)': None, 'GENERATE_CUSTOMIZE$RANGE_COUNT(4)': 3}], "data error") 24 | -------------------------------------------------------------------------------- /tests/test_script_engine.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 2023/4/23 3 | # create by: snower 4 | 5 | from unittest import TestCase 6 | from syncanysql import ScriptEngine 7 | 8 | 9 | class ScriptEngineTestCase(TestCase): 10 | def get_test_data(self): 11 | return [ 12 | {"site_id": 8, "site_name": "黄豆网", "site_amount": 17.04, "timeout_at": "16:00:00", 13 | "vip_timeout_at": "11:00:00"}, 14 | {"site_id": 9, "site_name": "青菜网", "site_amount": 7.2, "timeout_at": "15:00:00", 15 | "vip_timeout_at": "11:00:00"}, 16 | {"site_id": 8, "site_name": "去啥网", "site_amount": 10.41, "timeout_at": "16:00:00", 17 | "vip_timeout_at": "11:00:00"}, 18 | {"site_id": 10, "site_name": "汽车网", "site_amount": 5.8, "timeout_at": "16:00:00", 19 | "vip_timeout_at": "11:00:00"}, 20 | {"site_id": 9, "site_name": "火箭网", "site_amount": 4.5, "timeout_at": "15:00:00", 21 | "vip_timeout_at": "10:00:00"}, 22 | {"site_id": 9, "site_name": "卫星网", "site_amount": 11.2, "timeout_at": "16:40:00", 23 | "vip_timeout_at": "11:20:00"}, 24 | ] 25 | 26 | def get_stats_data(self): 27 | return [ 28 | {'site_id': 8, 'site_name': '黄豆网', 'order_cnt': 2, 'total_amount': 27.45}, 29 | {'site_id': 9, 'site_name': '青菜网', 'order_cnt': 3, 'total_amount': 22.9}, 30 | {'site_id': 10, 'site_name': '汽车网', 'order_cnt': 1, 'total_amount': 5.8} 31 | ] 32 | 33 | def test_script_engine(self): 34 | with ScriptEngine() as engine: 35 | engine.push_memory_datas("test_data", self.get_test_data()) 36 | self.assertEqual(engine.get_memory_datas("test_data"), self.get_test_data()) 37 | 38 | engine.execute(''' 39 | INSERT INTO `stats_data` SELECT site_id, site_name, count(*) order_cnt, sum(site_amount) total_amount 40 | FROM test_data GROUP BY site_id; 41 | ''') 42 | self.assertEqual(engine.get_memory_datas("stats_data"), self.get_stats_data()) 43 | self.assertEqual(engine.pop_memory_datas("stats_data"), self.get_stats_data()) 44 | self.assertEqual(engine.get_memory_datas("stats_data"), []) 45 | 46 | engine.use("test_func", lambda: 1) 47 | engine.set_variable("test_var", 1) 48 | 49 | with engine.context() as context: 50 | context.use("test_func", lambda: 2) 51 | context.set_variable("test_var", 2) 52 | context.use("test_context_func", lambda: 2) 53 | context.set_variable("test_context_var", 2) 54 | 55 | context.execute(''' 56 | insert into `result_data` select test_func() as test_func, @test_var as test_var, 57 | test_context_func() as
test_context_func, @test_context_var as test_context_var; 58 | 59 | set global @aaa=1; 60 | ''') 61 | self.assertEqual(context.get_memory_datas("result_data"), 62 | [{"test_func": 2, "test_var": 2, "test_context_func": 2, "test_context_var": 2}]) 63 | self.assertEqual(context.pop_memory_datas("result_data"), 64 | [{"test_func": 2, "test_var": 2, "test_context_func": 2, "test_context_var": 2}]) 65 | self.assertEqual(context.get_memory_datas("result_data"), []) 66 | 67 | engine.execute(''' 68 | insert into `result_data` select test_func() as test_func, @test_var as test_var; 69 | 70 | insert into `result_aaa` select @aaa as aaa; 71 | ''') 72 | self.assertEqual(engine.get_memory_datas("result_data"), 73 | [{"test_func": 1, "test_var": 1}]) 74 | self.assertEqual(engine.pop_memory_datas("result_data"), 75 | [{"test_func": 1, "test_var": 1}]) 76 | self.assertEqual(engine.get_memory_datas("result_data"), []) 77 | 78 | self.assertEqual(engine.pop_memory_datas("result_aaa"), [{"aaa": 1}]) 79 | 80 | --------------------------------------------------------------------------------
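Beyond the test suite itself, tests/test_script_engine.py doubles as a reference for embedding the engine: push_memory_datas/get_memory_datas/pop_memory_datas move rows in and out of in-memory tables, use() registers Python callables as SQL functions, set_variable() binds @variables, and context() scopes overrides to one session. A minimal standalone sketch under those assumptions (the table names and rows are illustrative only):

from syncanysql import ScriptEngine

with ScriptEngine() as engine:
    # Load rows into an in-memory table that SQL statements can read.
    engine.push_memory_datas("orders", [{"site_id": 1, "amount": 10.5},
                                        {"site_id": 1, "amount": 2.5}])
    engine.set_variable("min_amount", 1)
    engine.execute('''
        INSERT INTO `stats` SELECT site_id, sum(amount) AS total_amount
        FROM orders WHERE amount >= @min_amount GROUP BY site_id;
    ''')
    # pop_memory_datas returns the rows and clears the table.
    print(engine.pop_memory_datas("stats"))  # [{'site_id': 1, 'total_amount': 13.0}]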