├── .gitignore ├── LICENSE ├── README.md ├── app.py ├── bbq_manufacturing.db ├── chainlit.md ├── lookups └── sqlite3-functions.json ├── poetry.lock ├── prompts ├── dig_deeper.prompt ├── explain_result.prompt ├── query_classification.prompt ├── query_expansion.prompt ├── query_healing.prompt └── sql_generation.prompt ├── pyproject.toml ├── run.py ├── scripts └── generate_database.py └── table_rag └── __init__.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
162 | #.idea/ 163 | 164 | .chainlit/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 knowhow-ai 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # 🌟 **Table-rag+: A Self-Healing Query Generator for Tabular Data** 🌟 3 | 4 | **Table-rag+** is a self-healing query generator designed to assist with querying large-scale tables using language models (LMs). 
It expands upon the ideas introduced in the [TableRAG paper](https://arxiv.org/abs/2410.04739v1) by incorporating schema and cell retrieval, query expansion, and enhanced error-handling mechanisms, making it ideal for complex table queries. 5 | 6 | --- 7 | 8 | ## ✨ **Key Features** 9 | 10 | - **🔄 Tabular Query Expansion**: Automatically expands user queries to suggest the most relevant columns and cell values. 11 | - **🔑 Foreign Key Detection**: Extracts foreign key relationships between tables and provides intelligent suggestions for joins. 12 | - **📊 Cell Database**: Builds an efficient database of distinct column-value pairs to retrieve relevant cells, improving query accuracy. 13 | - **⚡ Self-Healing SQL Execution**: When an SQL query fails, the system automatically attempts to heal the query and retries execution up to **three times**. 14 | - **🤖 Integration with Mistral-Nemo (Ollama)**: Uses *Mistral-Nemo* via the *Ollama* API to process natural language and generate optimized SQL queries. 15 | - **🛠️ Contextual Query Repair**: Automatically logs errors and regenerates SQL queries based on feedback from the database system. 16 | - **📜 Prompt-Based Query Generation**: Manages prompt templates with external `.prompt` files for easy updates and customization. 17 | - **📈 Efficient Query Processing**: Utilizes schema and cell retrieval to minimize token complexity and ensure efficient large table processing. 18 | 19 | --- 20 | 21 | ## ⚙️ **Installation** 22 | 23 | ### 📋 **Prerequisites** 24 | - 🐍 Python 3.7+ 25 | - 📦 Poetry for dependency management 26 | - 🗄️ SQLite3 (usually included with Python) 27 | - 🧠 [**Ollama**](https://ollama.com/) with [**Mistral-Nemo**](https://ollama.com/library/mistral-nemo) model 28 | - 🖥️ [**Chainlit**](https://docs.chainlit.io/) 29 | 30 | ### 🚀 **Step-by-Step Setup** 31 | 32 | 1. **Clone the Repository**: 33 | ```bash 34 | git clone git@github.com:knowhow-ai/table-rag-plus.git 35 | cd table-rag-plus 36 | ``` 37 | 38 | 2. 
**Install Dependencies Using Poetry**: 39 | ```bash 40 | poetry install 41 | ``` 42 | 43 | 3. **Set Environment Variables**: 44 | ```bash 45 | export LLM_API_SERVER="http://localhost:11434/v1" 46 | export LLM_API_KEY="ollama" 47 | export LLM_MODEL="mistral-nemo" 48 | ``` 49 | 50 | 4. **Install SQLite (if not already installed)**: 51 | ```bash 52 | sudo apt-get install sqlite3 53 | ``` 54 | 55 | 5. **Prepare the Database**: 56 | A SQLite database named `bbq_manufacturing.db` will be created when you run the demo. 57 | 58 | --- 59 | 60 | ## 🖥️ **Demo** 61 | 62 | To run the demo: 63 | 64 | ```bash 65 | poetry run chainlit run app.py 66 | ``` 67 | 68 | This launches the Chainlit application where you can enter natural language queries. **Table-rag+** will translate them into SQL and execute them against the fictional BBQ manufacturing company's SQLite database. 69 | 70 | ### 🛠️ **Example Prompts** 71 | 72 | Here are some example prompts you can use: 73 | 74 | - **"Who is selling the most BBQ sauce?"** 75 | - **"Show me the total sales for BBQ grills in the last year."** 76 | - **"What is the average salary of employees in the HR department?"** 77 | - **"Which employee worked the most hours last month?"** 78 | - **"What is the gross pay for employees in the Marketing department?"** 79 | 80 | --- 81 | 82 | ## ⚡ **Self-Healing Feature** 83 | 84 | If a query fails during execution, **Table-rag+** will: 85 | 1. Attempt to heal the query using an LLM. 86 | 2. Retry execution up to **three times**. 87 | 3. Return an error if the retries are unsuccessful. 88 | 89 | ### ✨ **Example** 90 | 91 | ```python 92 | async def run(): 93 | prompt = "What is the average salary of employees in the HR department?" 
94 | sql_query = await table_rag.generate_sql_query(prompt) 95 | result_tuple = await table_rag.execute_sql_query(prompt, sql_query) 96 | print(result_tuple) 97 | ``` 98 | 99 | Upon input, the system will generate a corresponding SQL query and attempt to execute it, correcting any errors if necessary. 100 | 101 | --- 102 | 103 | ## 📚 **References** 104 | 105 | This project builds upon the concepts introduced in the following research paper: 106 | 107 | - 📄 **Si-An Chen, Lesly Miculicich, Julian Martin Eisenschlos, et al.** 108 | "TableRAG: Million-Token Table Understanding with Language Models." 109 | 38th Conference on Neural Information Processing Systems (NeurIPS 2024). 110 | [**arXiv:2410.04739v1**](https://arxiv.org/abs/2410.04739v1) 111 | 112 | --- 113 | 114 | ## 🚀 **Future Improvements** 115 | 116 | ### 🔨 **Work in Progress** 117 | This is the **first version** of **table-rag+**. It currently works only with **SQLite databases** and is **not optimized for large-scale datasets**. Here are some exciting improvements we have planned: 118 | 119 | - **🤖 Auto-Prompt Generation for SQLite and Other Data Sources**: 120 | - Automatic generation of optimized prompts based on database structure, starting with **SQLite**. 121 | - **🔍 Retrieval-Augmented Generation (RAG) Support**: 122 | - Integrate **RAG** to enhance the system’s ability to answer questions by retrieving relevant documents and support procedures alongside database queries. 123 | - **🔄 Move to SQLAlchemy**: 124 | - Expand beyond **SQLite** to support multiple databases by adopting **SQLAlchemy**, allowing integration with PostgreSQL, MySQL, and more. 125 | - **🛠️ Advanced Error Diagnostics**: 126 | - Provide granular feedback on why specific SQL queries fail and offer detailed diagnostics for better debugging and correction. 127 | - **🌐 Distributed Processing**: 128 | - Scale the system across multiple databases and table structures to create a robust solution for enterprise-level use cases.
129 | 130 | --- 131 | 132 | By using **table-rag+**, you can efficiently process natural language queries on tabular data, benefiting from advanced error-handling and query generation capabilities. Stay tuned as we continue to improve and scale this project! 🚀✨ 133 | 134 | --- 135 | 136 | If you have any questions, suggestions, or ideas for contributions, feel free to submit a pull request or reach out! 😊 137 | 138 | 139 | This updated README includes instructions specific to the **Table-rag+** demo setup with **Chainlit**, highlights the fictional BBQ manufacturing database, and provides sample prompts for users to try out. 140 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import asyncio 3 | import os 4 | import chainlit as cl 5 | from table_rag import TableRAG 6 | from openai import AsyncOpenAI # Assuming you're using OpenAI API for LLM 7 | from tabulate import tabulate 8 | 9 | LLM_API_SERVER = os.environ.get("LLM_API_SERVER", "http://localhost:11434/v1") 10 | LLM_API_KEY = os.environ.get("LLM_API_KEY", "ollama") 11 | LLM_MODEL = os.environ.get("LLM_MODEL", "mistral-nemo") 12 | 13 | 14 | logging.basicConfig(level=logging.INFO) 15 | 16 | client = AsyncOpenAI(api_key=LLM_API_KEY, base_url=LLM_API_SERVER) 17 | 18 | db_path = 'bbq_manufacturing.db' 19 | table_rag = TableRAG(db_path, client) 20 | 21 | # Step for generating SQL query 22 | @cl.step(type="tool") 23 | async def generate_sql_query(prompt: str): 24 | try: 25 | sql_query = await table_rag.generate_sql_query(prompt) 26 | return {"sql_query": sql_query} 27 | except Exception as e: 28 | logging.error(f"Error generating SQL query: {e}") 29 | return {"error": str(e)} 30 | 31 | # Step for executing SQL query 32 | @cl.step(type="tool") 33 | async def execute_sql_query(prompt: str, sql_query: str): 34 | try: 35 | result_tuple = await table_rag.execute_sql_query(prompt, 
sql_query) 36 | return {"results": result_tuple[0], "columns": result_tuple[1]} 37 | except Exception as e: 38 | logging.error(f"Error executing SQL query: {e}") 39 | return {"error": str(e)} 40 | 41 | # Step for explaining result 42 | @cl.step(type="tool") 43 | async def explain_result(result, prompt): 44 | try: 45 | explanation = await table_rag.explain_result(result, prompt) 46 | return {"explanation": explanation} 47 | except Exception as e: 48 | logging.error(f"Error explaining result: {e}") 49 | return {"error": str(e)} 50 | 51 | # Step for digging deeper into the result 52 | @cl.step(type="tool") 53 | async def dig_deeper(sql_query: str, result, prompt: str, explanation: str): 54 | try: 55 | dig_deeper_sql = await table_rag.dig_deeper(sql_query, result, prompt, explanation) 56 | return {"dig_deeper_sql": dig_deeper_sql} 57 | except Exception as e: 58 | logging.error(f"Error digging deeper: {e}") 59 | return {"error": str(e)} 60 | 61 | # Define what happens when a message is received 62 | @cl.on_message 63 | async def main(message: cl.Message): 64 | prompt = message.content 65 | 66 | try: 67 | # Generate the SQL query 68 | sql_query_result = await generate_sql_query(prompt) 69 | if "error" in sql_query_result: 70 | await cl.Message(content=f"Error: {sql_query_result['error']}").send() 71 | return 72 | 73 | sql_query = sql_query_result["sql_query"] 74 | await cl.Message(content=f"```sql\n{sql_query}\n```").send() 75 | 76 | # Execute the SQL query 77 | result_tuple = await execute_sql_query(prompt, sql_query) 78 | if "error" in result_tuple: 79 | await cl.Message(content=f"Error: {result_tuple['error']}").send() 80 | return 81 | 82 | results, columns = result_tuple["results"], result_tuple["columns"] 83 | if results: 84 | result_table = tabulate(results, headers=columns, tablefmt="github") 85 | await cl.Message(content=result_table).send() 86 | table_rag.add_message({"role":"assistant", "content": result_table}) 87 | 88 | # Explain the result 89 | 
explanation_result = await explain_result(result_table, prompt) 90 | if "error" in explanation_result: 91 | await cl.Message(content=f"Error: {explanation_result['error']}").send() 92 | return 93 | 94 | explanation = explanation_result["explanation"] 95 | table_rag.add_message({"role":"assistant", "content": explanation}) 96 | await cl.Message(content=f"Explanation: {explanation}").send() 97 | 98 | # Dig deeper into the result 99 | dig_deeper_result = await dig_deeper(sql_query, result_table, prompt, explanation) 100 | if "error" in dig_deeper_result: 101 | await cl.Message(content=f"Error: {dig_deeper_result['error']}").send() 102 | return 103 | 104 | dig_deeper_sql = dig_deeper_result["dig_deeper_sql"] 105 | deeper_result_tuple = await execute_sql_query(prompt, dig_deeper_sql) 106 | if "error" in deeper_result_tuple: 107 | await cl.Message(content=f"Error: {deeper_result_tuple['error']}").send() 108 | return 109 | 110 | 111 | try: 112 | deeper_results, deeper_columns = deeper_result_tuple["results"], deeper_result_tuple["columns"] 113 | deeper_result_table = tabulate(deeper_results, headers=deeper_columns, tablefmt="github") 114 | table_rag.add_message({"role":"assistant", "content": deeper_result_table}) 115 | await cl.Message(content=f"Deeper Result:\n{deeper_result_table}").send() 116 | 117 | # Explain the deeper result 118 | deeper_explanation_result = await explain_result(deeper_results, prompt) 119 | if "error" in deeper_explanation_result: 120 | # send just the explain 121 | #await cl.Message(content=f"Error: {deeper_explanation_result['error']}").send() 122 | return 123 | else: 124 | deeper_explanation = deeper_explanation_result["explanation"] 125 | await cl.Message(content=f"Deeper Explanation: {deeper_explanation}").send() 126 | table_rag.add_message({"role":"assistant", "content": deeper_explanation}) 127 | except Exception as e: 128 | return 129 | 130 | except ValueError as e: 131 | await cl.Message(content=f"...").send() 132 | 
-------------------------------------------------------------------------------- /bbq_manufacturing.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/knowhow-ai/table-rag-plus/b302a8789da2f2895bb1b26d3f198b68f8fe39fe/bbq_manufacturing.db -------------------------------------------------------------------------------- /chainlit.md: -------------------------------------------------------------------------------- 1 | # Welcome to Table-rag+! 🌟🤖 2 | 3 | Hi there, Developer! 👋 We’re thrilled to introduce you to **Table-rag+**, a powerful self-healing query generator designed for complex tabular data. Built upon the ideas from the TableRAG paper, **Table-rag+** takes SQL query generation and error-handling to the next level by incorporating schema and cell retrieval, query expansion, and enhanced contextual feedback. This is the **first version**, and we're excited for you to explore its capabilities! 4 | 5 | 6 | # About this Demo! 🌟🤖 7 | 8 | In this demo, you can interact with a fictional manufacturing company's database that produces BBQs and accessories. Using **Table-rag+**, you'll be able to generate SQL queries from natural language prompts, query the SQLite database, and see how the system handles and heals any query issues. 9 | 10 | ## This demo 11 | 12 | Here are some sample prompts to help you get started with **Table-rag+**: 13 | 14 | - **"Who is selling the most BBQ sauce?"** 15 | - **"Show me the total sales for BBQ grills in the last year."** 16 | - **"What is the average salary of employees in the Engineering department?"** 17 | - **"Which employee worked the most hours last month?"** 18 | - **"What is the gross pay for employees in the HR department?"** 19 | 20 | ## Key Features ✨ 21 | 22 | - **🔄 Tabular Query Expansion**: Automatically expands user queries for relevant columns and cell values. 
23 | - **📊 Foreign Key Detection & Intelligent Joins**: Makes query-building more efficient with relationship suggestions. 24 | - **⚡ Self-Healing SQL Execution**: Corrects and retries failed queries up to **three times**. 25 | - **🤖 LLM-Powered Queries**: Uses *Mistral-Nemo* via the *Ollama* API to translate natural language into SQL. 26 | 27 | ## Future Plans 🚀 28 | 29 | We’re committed to scaling **Table-rag+** with support for more databases, distributed processing, and advanced error diagnostics to handle enterprise-level datasets. Your feedback will help shape this vision, and we can't wait to see what you build with it! 30 | 31 | ## Useful Links 🔗 32 | 33 | - **GitHub Repository**: [Explore the Table-rag+ Code](https://github.com/knowhow-ai/table-rag-plus) 34 | - **TableRAG Paper**: [Read the Original Research](https://arxiv.org/abs/2410.04739v1) 35 | - **Ollama Mistral-Nemo**: [Check Out the LLM Model](https://ollama.com/library/mistral-nemo) 36 | 37 | We can’t wait to see the insights you’ll unlock with **Table-rag+**! Happy Chatting! 
💻😊 38 | 39 | 40 | -------------------------------------------------------------------------------- /lookups/sqlite3-functions.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "length", 4 | "description": "Returns the number of characters in a string.", 5 | "category": "string", 6 | "example": "SELECT length('Hello, World!'); -- Returns 13" 7 | }, 8 | { 9 | "name": "upper", 10 | "description": "Converts a string to uppercase.", 11 | "category": "string", 12 | "example": "SELECT upper('Hello, World!'); -- Returns 'HELLO, WORLD!'" 13 | }, 14 | { 15 | "name": "lower", 16 | "description": "Converts a string to lowercase.", 17 | "category": "string", 18 | "example": "SELECT lower('Hello, World!'); -- Returns 'hello, world!'" 19 | }, 20 | { 21 | "name": "substr", 22 | "description": "Extracts a substring from a string starting at a specified position.", 23 | "category": "string", 24 | "example": "SELECT substr('Hello, World!', 8, 5); -- Returns 'World'" 25 | }, 26 | { 27 | "name": "replace", 28 | "description": "Replaces occurrences of a specified substring with another string.", 29 | "category": "string", 30 | "example": "SELECT replace('Hello, World!', 'World', 'SQLite'); -- Returns 'Hello, SQLite!'" 31 | }, 32 | { 33 | "name": "trim", 34 | "description": "Removes leading and trailing spaces from a string.", 35 | "category": "string", 36 | "example": "SELECT trim(' Hello, World! 
'); -- Returns 'Hello, World!'" 37 | }, 38 | { 39 | "name": "abs", 40 | "description": "Returns the absolute value of a number.", 41 | "category": "math", 42 | "example": "SELECT abs(-10); -- Returns 10" 43 | }, 44 | { 45 | "name": "round", 46 | "description": "Rounds a number to a specified number of decimal places.", 47 | "category": "math", 48 | "example": "SELECT round(3.14159, 2); -- Returns 3.14" 49 | }, 50 | { 51 | "name": "ceil", 52 | "description": "Returns the smallest integer greater than or equal to a number.", 53 | "category": "math", 54 | "example": "SELECT ceil(3.14159); -- Returns 4" 55 | }, 56 | { 57 | "name": "random", 58 | "description": "Generates a random integer between -9223372036854775808 and +9223372036854775807.", 59 | "category": "math", 60 | "example": "SELECT random(); -- Returns a random integer" 61 | }, 62 | { 63 | "name": "date", 64 | "description": "Returns the date in 'YYYY-MM-DD' format.", 65 | "category": "date_time", 66 | "example": "SELECT date('now'); -- Returns the current date" 67 | }, 68 | { 69 | "name": "time", 70 | "description": "Returns the time in 'HH:MM:SS' format.", 71 | "category": "date_time", 72 | "example": "SELECT time('now'); -- Returns the current time" 73 | }, 74 | { 75 | "name": "datetime", 76 | "description": "Returns the date and time in 'YYYY-MM-DD HH:MM:SS' format.", 77 | "category": "date_time", 78 | "example": "SELECT datetime('now'); -- Returns the current date and time" 79 | }, 80 | { 81 | "name": "timediff", 82 | "description": "Calculates the difference between two times.", 83 | "category": "date_time", 84 | "example": "SELECT timediff('2024-10-21 14:00:00', '2024-10-21 12:30:00'); -- Returns '01:30:00'" 85 | }, 86 | { 87 | "name": "count", 88 | "description": "Counts the number of items in a set.", 89 | "category": "aggregate", 90 | "example": "SELECT count(*) FROM employees; -- Returns the total number of employees" 91 | }, 92 | { 93 | "name": "sum", 94 | "description": "Calculates the sum of a 
set of numbers.", 95 | "category": "aggregate", 96 | "example": "SELECT sum(salary) FROM employees; -- Returns the total sum of all salaries" 97 | } 98 | ] 99 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "annotated-types" 5 | version = "0.7.0" 6 | description = "Reusable constraint types to use with typing.Annotated" 7 | optional = false 8 | python-versions = ">=3.8" 9 | files = [ 10 | {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, 11 | {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, 12 | ] 13 | 14 | [[package]] 15 | name = "anyio" 16 | version = "4.6.2.post1" 17 | description = "High level compatibility layer for multiple asynchronous event loop implementations" 18 | optional = false 19 | python-versions = ">=3.9" 20 | files = [ 21 | {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, 22 | {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, 23 | ] 24 | 25 | [package.dependencies] 26 | exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} 27 | idna = ">=2.8" 28 | sniffio = ">=1.1" 29 | typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} 30 | 31 | [package.extras] 32 | doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] 33 | test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", 
"uvloop (>=0.21.0b1)"] 34 | trio = ["trio (>=0.26.1)"] 35 | 36 | [[package]] 37 | name = "certifi" 38 | version = "2024.8.30" 39 | description = "Python package for providing Mozilla's CA Bundle." 40 | optional = false 41 | python-versions = ">=3.6" 42 | files = [ 43 | {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, 44 | {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, 45 | ] 46 | 47 | [[package]] 48 | name = "colorama" 49 | version = "0.4.6" 50 | description = "Cross-platform colored terminal text." 51 | optional = false 52 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 53 | files = [ 54 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 55 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 56 | ] 57 | 58 | [[package]] 59 | name = "coloredlogs" 60 | version = "15.0.1" 61 | description = "Colored terminal output for Python's logging module" 62 | optional = false 63 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 64 | files = [ 65 | {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, 66 | {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, 67 | ] 68 | 69 | [package.dependencies] 70 | humanfriendly = ">=9.1" 71 | 72 | [package.extras] 73 | cron = ["capturer (>=2.4)"] 74 | 75 | [[package]] 76 | name = "distro" 77 | version = "1.9.0" 78 | description = "Distro - an OS platform information API" 79 | optional = false 80 | python-versions = ">=3.6" 81 | files = [ 82 | {file = "distro-1.9.0-py3-none-any.whl", hash = 
"sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, 83 | {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, 84 | ] 85 | 86 | [[package]] 87 | name = "exceptiongroup" 88 | version = "1.2.2" 89 | description = "Backport of PEP 654 (exception groups)" 90 | optional = false 91 | python-versions = ">=3.7" 92 | files = [ 93 | {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, 94 | {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, 95 | ] 96 | 97 | [package.extras] 98 | test = ["pytest (>=6)"] 99 | 100 | [[package]] 101 | name = "faker" 102 | version = "30.8.0" 103 | description = "Faker is a Python package that generates fake data for you." 104 | optional = false 105 | python-versions = ">=3.8" 106 | files = [ 107 | {file = "Faker-30.8.0-py3-none-any.whl", hash = "sha256:4cd0c5ea4bc1e4c902967f6e662f5f5da69f1674d9a94f54e516d27f3c2a6a16"}, 108 | {file = "faker-30.8.0.tar.gz", hash = "sha256:3608c7fcac2acde0eaa6da28dae97628f18f14d54eaa2a92b96ae006f1621bd7"}, 109 | ] 110 | 111 | [package.dependencies] 112 | python-dateutil = ">=2.4" 113 | typing-extensions = "*" 114 | 115 | [[package]] 116 | name = "flatbuffers" 117 | version = "24.3.25" 118 | description = "The FlatBuffers serialization format for Python" 119 | optional = false 120 | python-versions = "*" 121 | files = [ 122 | {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, 123 | {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, 124 | ] 125 | 126 | [[package]] 127 | name = "h11" 128 | version = "0.14.0" 129 | description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" 130 | optional = false 131 | python-versions = 
">=3.7" 132 | files = [ 133 | {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, 134 | {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, 135 | ] 136 | 137 | [[package]] 138 | name = "httpcore" 139 | version = "1.0.6" 140 | description = "A minimal low-level HTTP client." 141 | optional = false 142 | python-versions = ">=3.8" 143 | files = [ 144 | {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, 145 | {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, 146 | ] 147 | 148 | [package.dependencies] 149 | certifi = "*" 150 | h11 = ">=0.13,<0.15" 151 | 152 | [package.extras] 153 | asyncio = ["anyio (>=4.0,<5.0)"] 154 | http2 = ["h2 (>=3,<5)"] 155 | socks = ["socksio (==1.*)"] 156 | trio = ["trio (>=0.22.0,<1.0)"] 157 | 158 | [[package]] 159 | name = "httpx" 160 | version = "0.27.2" 161 | description = "The next generation HTTP client." 
162 | optional = false 163 | python-versions = ">=3.8" 164 | files = [ 165 | {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, 166 | {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, 167 | ] 168 | 169 | [package.dependencies] 170 | anyio = "*" 171 | certifi = "*" 172 | httpcore = "==1.*" 173 | idna = "*" 174 | sniffio = "*" 175 | 176 | [package.extras] 177 | brotli = ["brotli", "brotlicffi"] 178 | cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] 179 | http2 = ["h2 (>=3,<5)"] 180 | socks = ["socksio (==1.*)"] 181 | zstd = ["zstandard (>=0.18.0)"] 182 | 183 | [[package]] 184 | name = "humanfriendly" 185 | version = "10.0" 186 | description = "Human friendly output for text interfaces using Python" 187 | optional = false 188 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 189 | files = [ 190 | {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, 191 | {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, 192 | ] 193 | 194 | [package.dependencies] 195 | pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} 196 | 197 | [[package]] 198 | name = "idna" 199 | version = "3.10" 200 | description = "Internationalized Domain Names in Applications (IDNA)" 201 | optional = false 202 | python-versions = ">=3.6" 203 | files = [ 204 | {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, 205 | {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, 206 | ] 207 | 208 | [package.extras] 209 | all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] 210 | 211 | [[package]] 212 
| name = "jiter" 213 | version = "0.6.1" 214 | description = "Fast iterable JSON parser." 215 | optional = false 216 | python-versions = ">=3.8" 217 | files = [ 218 | {file = "jiter-0.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d08510593cb57296851080018006dfc394070178d238b767b1879dc1013b106c"}, 219 | {file = "jiter-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adef59d5e2394ebbad13b7ed5e0306cceb1df92e2de688824232a91588e77aa7"}, 220 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e02f7a27f2bcc15b7d455c9df05df8ffffcc596a2a541eeda9a3110326e7a3"}, 221 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed69a7971d67b08f152c17c638f0e8c2aa207e9dd3a5fcd3cba294d39b5a8d2d"}, 222 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2019d966e98f7c6df24b3b8363998575f47d26471bfb14aade37630fae836a1"}, 223 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36c0b51a285b68311e207a76c385650322734c8717d16c2eb8af75c9d69506e7"}, 224 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220e0963b4fb507c525c8f58cde3da6b1be0bfddb7ffd6798fb8f2531226cdb1"}, 225 | {file = "jiter-0.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa25c7a9bf7875a141182b9c95aed487add635da01942ef7ca726e42a0c09058"}, 226 | {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e90552109ca8ccd07f47ca99c8a1509ced93920d271bb81780a973279974c5ab"}, 227 | {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:67723a011964971864e0b484b0ecfee6a14de1533cff7ffd71189e92103b38a8"}, 228 | {file = "jiter-0.6.1-cp310-none-win32.whl", hash = "sha256:33af2b7d2bf310fdfec2da0177eab2fedab8679d1538d5b86a633ebfbbac4edd"}, 229 | {file = "jiter-0.6.1-cp310-none-win_amd64.whl", hash = 
"sha256:7cea41c4c673353799906d940eee8f2d8fd1d9561d734aa921ae0f75cb9732f4"}, 230 | {file = "jiter-0.6.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b03c24e7da7e75b170c7b2b172d9c5e463aa4b5c95696a368d52c295b3f6847f"}, 231 | {file = "jiter-0.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47fee1be677b25d0ef79d687e238dc6ac91a8e553e1a68d0839f38c69e0ee491"}, 232 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0d2f6e01a8a0fb0eab6d0e469058dab2be46ff3139ed2d1543475b5a1d8e7"}, 233 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b809e39e342c346df454b29bfcc7bca3d957f5d7b60e33dae42b0e5ec13e027"}, 234 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9ac7c2f092f231f5620bef23ce2e530bd218fc046098747cc390b21b8738a7a"}, 235 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e51a2d80d5fe0ffb10ed2c82b6004458be4a3f2b9c7d09ed85baa2fbf033f54b"}, 236 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3343d4706a2b7140e8bd49b6c8b0a82abf9194b3f0f5925a78fc69359f8fc33c"}, 237 | {file = "jiter-0.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82521000d18c71e41c96960cb36e915a357bc83d63a8bed63154b89d95d05ad1"}, 238 | {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c843e7c1633470708a3987e8ce617ee2979ee18542d6eb25ae92861af3f1d62"}, 239 | {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2e861658c3fe849efc39b06ebb98d042e4a4c51a8d7d1c3ddc3b1ea091d0784"}, 240 | {file = "jiter-0.6.1-cp311-none-win32.whl", hash = "sha256:7d72fc86474862c9c6d1f87b921b70c362f2b7e8b2e3c798bb7d58e419a6bc0f"}, 241 | {file = "jiter-0.6.1-cp311-none-win_amd64.whl", hash = "sha256:3e36a320634f33a07794bb15b8da995dccb94f944d298c8cfe2bd99b1b8a574a"}, 242 | {file = 
"jiter-0.6.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1fad93654d5a7dcce0809aff66e883c98e2618b86656aeb2129db2cd6f26f867"}, 243 | {file = "jiter-0.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4e6e340e8cd92edab7f6a3a904dbbc8137e7f4b347c49a27da9814015cc0420c"}, 244 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691352e5653af84ed71763c3c427cff05e4d658c508172e01e9c956dfe004aba"}, 245 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:defee3949313c1f5b55e18be45089970cdb936eb2a0063f5020c4185db1b63c9"}, 246 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26d2bdd5da097e624081c6b5d416d3ee73e5b13f1703bcdadbb1881f0caa1933"}, 247 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18aa9d1626b61c0734b973ed7088f8a3d690d0b7f5384a5270cd04f4d9f26c86"}, 248 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3567c8228afa5ddcce950631c6b17397ed178003dc9ee7e567c4c4dcae9fa0"}, 249 | {file = "jiter-0.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c0507131c922defe3f04c527d6838932fcdfd69facebafd7d3574fa3395314"}, 250 | {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:540fcb224d7dc1bcf82f90f2ffb652df96f2851c031adca3c8741cb91877143b"}, 251 | {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e7b75436d4fa2032b2530ad989e4cb0ca74c655975e3ff49f91a1a3d7f4e1df2"}, 252 | {file = "jiter-0.6.1-cp312-none-win32.whl", hash = "sha256:883d2ced7c21bf06874fdeecab15014c1c6d82216765ca6deef08e335fa719e0"}, 253 | {file = "jiter-0.6.1-cp312-none-win_amd64.whl", hash = "sha256:91e63273563401aadc6c52cca64a7921c50b29372441adc104127b910e98a5b6"}, 254 | {file = "jiter-0.6.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:852508a54fe3228432e56019da8b69208ea622a3069458252f725d634e955b31"}, 255 | {file = "jiter-0.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f491cc69ff44e5a1e8bc6bf2b94c1f98d179e1aaf4a554493c171a5b2316b701"}, 256 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc56c8f0b2a28ad4d8047f3ae62d25d0e9ae01b99940ec0283263a04724de1f3"}, 257 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51b58f7a0d9e084a43b28b23da2b09fc5e8df6aa2b6a27de43f991293cab85fd"}, 258 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f79ce15099154c90ef900d69c6b4c686b64dfe23b0114e0971f2fecd306ec6c"}, 259 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03a025b52009f47e53ea619175d17e4ded7c035c6fbd44935cb3ada11e1fd592"}, 260 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74a8d93718137c021d9295248a87c2f9fdc0dcafead12d2930bc459ad40f885"}, 261 | {file = "jiter-0.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40b03b75f903975f68199fc4ec73d546150919cb7e534f3b51e727c4d6ccca5a"}, 262 | {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:825651a3f04cf92a661d22cad61fc913400e33aa89b3e3ad9a6aa9dc8a1f5a71"}, 263 | {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:928bf25eb69ddb292ab8177fe69d3fbf76c7feab5fce1c09265a7dccf25d3991"}, 264 | {file = "jiter-0.6.1-cp313-none-win32.whl", hash = "sha256:352cd24121e80d3d053fab1cc9806258cad27c53cad99b7a3cac57cf934b12e4"}, 265 | {file = "jiter-0.6.1-cp313-none-win_amd64.whl", hash = "sha256:be7503dd6f4bf02c2a9bacb5cc9335bc59132e7eee9d3e931b13d76fd80d7fda"}, 266 | {file = "jiter-0.6.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:31d8e00e1fb4c277df8ab6f31a671f509ebc791a80e5c61fdc6bc8696aaa297c"}, 267 | {file = 
"jiter-0.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77c296d65003cd7ee5d7b0965f6acbe6cffaf9d1fa420ea751f60ef24e85fed5"}, 268 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeeb0c0325ef96c12a48ea7e23e2e86fe4838e6e0a995f464cf4c79fa791ceeb"}, 269 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a31c6fcbe7d6c25d6f1cc6bb1cba576251d32795d09c09961174fe461a1fb5bd"}, 270 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59e2b37f3b9401fc9e619f4d4badcab2e8643a721838bcf695c2318a0475ae42"}, 271 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bae5ae4853cb9644144e9d0755854ce5108d470d31541d83f70ca7ecdc2d1637"}, 272 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df588e9c830b72d8db1dd7d0175af6706b0904f682ea9b1ca8b46028e54d6e9"}, 273 | {file = "jiter-0.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15f8395e835cf561c85c1adee72d899abf2733d9df72e9798e6d667c9b5c1f30"}, 274 | {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a99d4e0b5fc3b05ea732d67eb2092fe894e95a90e6e413f2ea91387e228a307"}, 275 | {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a311df1fa6be0ccd64c12abcd85458383d96e542531bafbfc0a16ff6feda588f"}, 276 | {file = "jiter-0.6.1-cp38-none-win32.whl", hash = "sha256:81116a6c272a11347b199f0e16b6bd63f4c9d9b52bc108991397dd80d3c78aba"}, 277 | {file = "jiter-0.6.1-cp38-none-win_amd64.whl", hash = "sha256:13f9084e3e871a7c0b6e710db54444088b1dd9fbefa54d449b630d5e73bb95d0"}, 278 | {file = "jiter-0.6.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1c53615fcfec3b11527c08d19cff6bc870da567ce4e57676c059a3102d3a082"}, 279 | {file = "jiter-0.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f791b6a4da23238c17a81f44f5b55d08a420c5692c1fda84e301a4b036744eb1"}, 280 | 
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c97e90fec2da1d5f68ef121444c2c4fa72eabf3240829ad95cf6bbeca42a301"}, 281 | {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3cbc1a66b4e41511209e97a2866898733c0110b7245791ac604117b7fb3fedb7"}, 282 | {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e85f9e12cd8418ab10e1fcf0e335ae5bb3da26c4d13a0fd9e6a17a674783b6"}, 283 | {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08be33db6dcc374c9cc19d3633af5e47961a7b10d4c61710bd39e48d52a35824"}, 284 | {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:677be9550004f5e010d673d3b2a2b815a8ea07a71484a57d3f85dde7f14cf132"}, 285 | {file = "jiter-0.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8bd065be46c2eecc328e419d6557bbc37844c88bb07b7a8d2d6c91c7c4dedc9"}, 286 | {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bd95375ce3609ec079a97c5d165afdd25693302c071ca60c7ae1cf826eb32022"}, 287 | {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db459ed22d0208940d87f614e1f0ea5a946d29a3cfef71f7e1aab59b6c6b2afb"}, 288 | {file = "jiter-0.6.1-cp39-none-win32.whl", hash = "sha256:d71c962f0971347bd552940ab96aa42ceefcd51b88c4ced8a27398182efa8d80"}, 289 | {file = "jiter-0.6.1-cp39-none-win_amd64.whl", hash = "sha256:d465db62d2d10b489b7e7a33027c4ae3a64374425d757e963f86df5b5f2e7fc5"}, 290 | {file = "jiter-0.6.1.tar.gz", hash = "sha256:e19cd21221fc139fb032e4112986656cb2739e9fe6d84c13956ab30ccc7d4449"}, 291 | ] 292 | 293 | [[package]] 294 | name = "mpmath" 295 | version = "1.3.0" 296 | description = "Python library for arbitrary-precision floating-point arithmetic" 297 | optional = false 298 | python-versions = "*" 299 | files = [ 300 | {file = "mpmath-1.3.0-py3-none-any.whl", hash = 
"sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, 301 | {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, 302 | ] 303 | 304 | [package.extras] 305 | develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] 306 | docs = ["sphinx"] 307 | gmpy = ["gmpy2 (>=2.1.0a4)"] 308 | tests = ["pytest (>=4.6)"] 309 | 310 | [[package]] 311 | name = "numpy" 312 | version = "2.1.2" 313 | description = "Fundamental package for array computing in Python" 314 | optional = false 315 | python-versions = ">=3.10" 316 | files = [ 317 | {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, 318 | {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, 319 | {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"}, 320 | {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"}, 321 | {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"}, 322 | {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"}, 323 | {file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"}, 324 | {file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"}, 325 | {file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"}, 326 | {file = 
"numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"}, 327 | {file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"}, 328 | {file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"}, 329 | {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"}, 330 | {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"}, 331 | {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"}, 332 | {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"}, 333 | {file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"}, 334 | {file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"}, 335 | {file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"}, 336 | {file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"}, 337 | {file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"}, 338 | {file = "numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"}, 339 | {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = 
"sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"}, 340 | {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"}, 341 | {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"}, 342 | {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"}, 343 | {file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"}, 344 | {file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"}, 345 | {file = "numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"}, 346 | {file = "numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"}, 347 | {file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"}, 348 | {file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"}, 349 | {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"}, 350 | {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"}, 351 | {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"}, 352 | {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"}, 353 | {file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"}, 354 | {file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"}, 355 | {file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"}, 356 | {file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"}, 357 | {file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"}, 358 | {file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"}, 359 | {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"}, 360 | {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"}, 361 | {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"}, 362 | {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"}, 363 | {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"}, 364 | {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"}, 365 | {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"}, 366 | {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"}, 367 | {file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"}, 368 | {file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"}, 369 | {file = "numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"}, 370 | ] 371 | 372 | [[package]] 373 | name = "onnxruntime" 374 | version = "1.19.2" 375 | description = "ONNX Runtime is a runtime accelerator for Machine Learning models" 376 | optional = false 377 | python-versions = "*" 378 | files = [ 379 | {file = "onnxruntime-1.19.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:84fa57369c06cadd3c2a538ae2a26d76d583e7c34bdecd5769d71ca5c0fc750e"}, 380 | {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdc471a66df0c1cdef774accef69e9f2ca168c851ab5e4f2f3341512c7ef4666"}, 381 | {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e3a4ce906105d99ebbe817f536d50a91ed8a4d1592553f49b3c23c4be2560ae6"}, 382 | {file = "onnxruntime-1.19.2-cp310-cp310-win32.whl", hash = "sha256:4b3d723cc154c8ddeb9f6d0a8c0d6243774c6b5930847cc83170bfe4678fafb3"}, 383 | {file = "onnxruntime-1.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:17ed7382d2c58d4b7354fb2b301ff30b9bf308a1c7eac9546449cd122d21cae5"}, 384 | {file = "onnxruntime-1.19.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d863e8acdc7232d705d49e41087e10b274c42f09e259016a46f32c34e06dc4fd"}, 385 | {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c1dfe4f660a71b31caa81fc298a25f9612815215a47b286236e61d540350d7b6"}, 386 | {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a36511dc07c5c964b916697e42e366fa43c48cdb3d3503578d78cef30417cb84"}, 387 | {file = "onnxruntime-1.19.2-cp311-cp311-win32.whl", hash = "sha256:50cbb8dc69d6befad4746a69760e5b00cc3ff0a59c6c3fb27f8afa20e2cab7e7"}, 388 | {file = "onnxruntime-1.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:1c3e5d415b78337fa0b1b75291e9ea9fb2a4c1f148eb5811e7212fed02cfffa8"}, 389 | {file = "onnxruntime-1.19.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:68e7051bef9cfefcbb858d2d2646536829894d72a4130c24019219442b1dd2ed"}, 390 | {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2d366fbcc205ce68a8a3bde2185fd15c604d9645888703785b61ef174265168"}, 391 | {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:477b93df4db467e9cbf34051662a4b27c18e131fa1836e05974eae0d6e4cf29b"}, 392 | {file = "onnxruntime-1.19.2-cp312-cp312-win32.whl", hash = "sha256:9a174073dc5608fad05f7cf7f320b52e8035e73d80b0a23c80f840e5a97c0147"}, 393 | {file = "onnxruntime-1.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:190103273ea4507638ffc31d66a980594b237874b65379e273125150eb044857"}, 394 | {file = "onnxruntime-1.19.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:636bc1d4cc051d40bc52e1f9da87fbb9c57d9d47164695dfb1c41646ea51ea66"}, 395 | {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5bd8b875757ea941cbcfe01582970cc299893d1b65bd56731e326a8333f638a3"}, 396 | {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2046fc9560f97947bbc1acbe4c6d48585ef0f12742744307d3364b131ac5778"}, 397 | {file = "onnxruntime-1.19.2-cp38-cp38-win32.whl", hash = "sha256:31c12840b1cde4ac1f7d27d540c44e13e34f2345cf3642762d2a3333621abb6a"}, 398 
| {file = "onnxruntime-1.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:016229660adea180e9a32ce218b95f8f84860a200f0f13b50070d7d90e92956c"}, 399 | {file = "onnxruntime-1.19.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:006c8d326835c017a9e9f74c9c77ebb570a71174a1e89fe078b29a557d9c3848"}, 400 | {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df2a94179a42d530b936f154615b54748239c2908ee44f0d722cb4df10670f68"}, 401 | {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fae4b4de45894b9ce7ae418c5484cbf0341db6813effec01bb2216091c52f7fb"}, 402 | {file = "onnxruntime-1.19.2-cp39-cp39-win32.whl", hash = "sha256:dc5430f473e8706fff837ae01323be9dcfddd3ea471c900a91fa7c9b807ec5d3"}, 403 | {file = "onnxruntime-1.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:38475e29a95c5f6c62c2c603d69fc7d4c6ccbf4df602bd567b86ae1138881c49"}, 404 | ] 405 | 406 | [package.dependencies] 407 | coloredlogs = "*" 408 | flatbuffers = "*" 409 | numpy = ">=1.21.6" 410 | packaging = "*" 411 | protobuf = "*" 412 | sympy = "*" 413 | 414 | [[package]] 415 | name = "openai" 416 | version = "1.52.0" 417 | description = "The official Python library for the openai API" 418 | optional = false 419 | python-versions = ">=3.7.1" 420 | files = [ 421 | {file = "openai-1.52.0-py3-none-any.whl", hash = "sha256:0c249f20920183b0a2ca4f7dba7b0452df3ecd0fa7985eb1d91ad884bc3ced9c"}, 422 | {file = "openai-1.52.0.tar.gz", hash = "sha256:95c65a5f77559641ab8f3e4c3a050804f7b51d278870e2ec1f7444080bfe565a"}, 423 | ] 424 | 425 | [package.dependencies] 426 | anyio = ">=3.5.0,<5" 427 | distro = ">=1.7.0,<2" 428 | httpx = ">=0.23.0,<1" 429 | jiter = ">=0.4.0,<1" 430 | pydantic = ">=1.9.0,<3" 431 | sniffio = "*" 432 | tqdm = ">4" 433 | typing-extensions = ">=4.11,<5" 434 | 435 | [package.extras] 436 | datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] 437 | 438 | [[package]] 439 | name = "packaging" 440 | 
version = "24.1" 441 | description = "Core utilities for Python packages" 442 | optional = false 443 | python-versions = ">=3.8" 444 | files = [ 445 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, 446 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, 447 | ] 448 | 449 | [[package]] 450 | name = "protobuf" 451 | version = "5.28.2" 452 | description = "" 453 | optional = false 454 | python-versions = ">=3.8" 455 | files = [ 456 | {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, 457 | {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, 458 | {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, 459 | {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, 460 | {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, 461 | {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, 462 | {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, 463 | {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, 464 | {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, 465 | {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, 466 | 
{file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, 467 | ] 468 | 469 | [[package]] 470 | name = "pydantic" 471 | version = "2.9.2" 472 | description = "Data validation using Python type hints" 473 | optional = false 474 | python-versions = ">=3.8" 475 | files = [ 476 | {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, 477 | {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, 478 | ] 479 | 480 | [package.dependencies] 481 | annotated-types = ">=0.6.0" 482 | pydantic-core = "2.23.4" 483 | typing-extensions = [ 484 | {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, 485 | {version = ">=4.6.1", markers = "python_version < \"3.13\""}, 486 | ] 487 | 488 | [package.extras] 489 | email = ["email-validator (>=2.0.0)"] 490 | timezone = ["tzdata"] 491 | 492 | [[package]] 493 | name = "pydantic-core" 494 | version = "2.23.4" 495 | description = "Core functionality for Pydantic validation and serialization" 496 | optional = false 497 | python-versions = ">=3.8" 498 | files = [ 499 | {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, 500 | {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, 501 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, 502 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, 503 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, 504 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, 505 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, 506 | {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, 507 | {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, 508 | {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, 509 | {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, 510 | {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, 511 | {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, 512 | {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, 513 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, 514 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, 515 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, 516 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, 517 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, 518 | {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, 519 | {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, 520 | {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, 521 | {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, 522 | {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, 523 | {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, 524 | {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, 525 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, 526 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, 527 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, 528 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, 529 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, 530 | {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, 531 | {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, 532 | {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, 533 | {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, 534 | {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, 535 | {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, 536 | {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, 537 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, 538 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, 539 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, 540 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, 541 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, 542 | {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, 543 | {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, 544 | {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, 545 | {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, 546 | {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, 547 | {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, 548 | {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, 549 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, 550 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, 551 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, 552 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, 553 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, 554 | {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, 555 | {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, 556 | {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, 557 | {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, 558 | {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, 559 | {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, 560 | {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, 561 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, 562 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, 563 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, 564 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, 565 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, 566 | {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, 567 | {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, 568 | {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, 569 | {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, 570 | {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, 571 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, 572 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, 573 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, 574 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, 575 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, 576 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, 577 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, 578 | {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, 579 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, 580 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, 581 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, 582 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, 583 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, 584 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, 585 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, 586 | {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, 587 | {file = "pydantic_core-2.23.4.tar.gz", hash = 
"sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, 588 | ] 589 | 590 | [package.dependencies] 591 | typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" 592 | 593 | [[package]] 594 | name = "pyreadline3" 595 | version = "3.5.4" 596 | description = "A python implementation of GNU readline." 597 | optional = false 598 | python-versions = ">=3.8" 599 | files = [ 600 | {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, 601 | {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, 602 | ] 603 | 604 | [package.extras] 605 | dev = ["build", "flake8", "mypy", "pytest", "twine"] 606 | 607 | [[package]] 608 | name = "python-dateutil" 609 | version = "2.9.0.post0" 610 | description = "Extensions to the standard Python datetime module" 611 | optional = false 612 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 613 | files = [ 614 | {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, 615 | {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, 616 | ] 617 | 618 | [package.dependencies] 619 | six = ">=1.5" 620 | 621 | [[package]] 622 | name = "six" 623 | version = "1.16.0" 624 | description = "Python 2 and 3 compatibility utilities" 625 | optional = false 626 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 627 | files = [ 628 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 629 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 630 | ] 631 | 632 | [[package]] 633 | name = "sniffio" 634 | version = "1.3.1" 635 | description = "Sniff out which async library your code is running under" 636 | optional = false 637 | 
python-versions = ">=3.7" 638 | files = [ 639 | {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, 640 | {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, 641 | ] 642 | 643 | [[package]] 644 | name = "sqlparse" 645 | version = "0.5.1" 646 | description = "A non-validating SQL parser." 647 | optional = false 648 | python-versions = ">=3.8" 649 | files = [ 650 | {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, 651 | {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, 652 | ] 653 | 654 | [package.extras] 655 | dev = ["build", "hatch"] 656 | doc = ["sphinx"] 657 | 658 | [[package]] 659 | name = "sympy" 660 | version = "1.13.3" 661 | description = "Computer algebra system (CAS) in Python" 662 | optional = false 663 | python-versions = ">=3.8" 664 | files = [ 665 | {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, 666 | {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, 667 | ] 668 | 669 | [package.dependencies] 670 | mpmath = ">=1.1.0,<1.4" 671 | 672 | [package.extras] 673 | dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] 674 | 675 | [[package]] 676 | name = "tabulate" 677 | version = "0.9.0" 678 | description = "Pretty-print tabular data" 679 | optional = false 680 | python-versions = ">=3.7" 681 | files = [ 682 | {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, 683 | {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, 684 | ] 685 | 686 | [package.extras] 687 | widechars = ["wcwidth"] 688 | 689 | [[package]] 690 | 
name = "tqdm" 691 | version = "4.66.5" 692 | description = "Fast, Extensible Progress Meter" 693 | optional = false 694 | python-versions = ">=3.7" 695 | files = [ 696 | {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, 697 | {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, 698 | ] 699 | 700 | [package.dependencies] 701 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 702 | 703 | [package.extras] 704 | dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] 705 | notebook = ["ipywidgets (>=6)"] 706 | slack = ["slack-sdk"] 707 | telegram = ["requests"] 708 | 709 | [[package]] 710 | name = "typing-extensions" 711 | version = "4.12.2" 712 | description = "Backported and Experimental Type Hints for Python 3.8+" 713 | optional = false 714 | python-versions = ">=3.8" 715 | files = [ 716 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 717 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, 718 | ] 719 | 720 | [metadata] 721 | lock-version = "2.0" 722 | python-versions = "^3.10" 723 | content-hash = "c025dc811da5ae3b176f372b006ebc6b96ee9bb79029566b53350d26643bd3e1" 724 | -------------------------------------------------------------------------------- /prompts/dig_deeper.prompt: -------------------------------------------------------------------------------- 1 | Your task is to go beyond the current to analyze more factors into the SQLite3 database to augment an answer. 2 | Adhere to these rules: 3 | - **Deliberately go through the question and database schema word by word** to appropriately answer the question 4 | - **Use Table Aliases** to prevent ambiguity. 
For example, `SELECT table1.col1, table2.col1 FROM table1 JOIN table2 ON table1.id = table2.id`. 5 | - **Use Column Aliases** to provide clear indications of result columns 6 | - When creating a ratio, always cast the numerator as float 7 | - Output only sql queries that are syntactically correct and execute without error in SQLite3. 8 | - **Use ilike aliases for text fields** to improve search s. For example, `SELECT table1.col1 from table1 where col1 ilike '%search_term%'; 9 | - Try to show trends in a meanigful way rather then just a value. 10 | - Do not share any commentary 11 | - Only response with the proper backticks. 12 | - Show only one query or combine queries 13 | 14 | This query will run on a database whose schema is represented in this string: 15 | 16 | {schema} 17 | 18 | The previous DB Sql was: 19 | ```sql 20 | {previous_sql} 21 | ``` 22 | The Previous Result was: 23 | ```sqlite3_result 24 | {previous_result} 25 | ``` 26 | The provided answer was: 27 | ```markdown 28 | {explaination} 29 | ``` 30 | 31 | Query: "{user_query}" 32 | Response: -------------------------------------------------------------------------------- /prompts/explain_result.prompt: -------------------------------------------------------------------------------- 1 | You are a business analyst helping executives understand their data better. 2 | Your task is to review results from a database and given a user natural query and the result of that query, 3 | and explain the results in a human-readable format that gets to the heart of the actual questions and goes beyond 4 | just the "numbers". 5 | Be concise and to the point. Provide high level summaries. 
6 | 7 | Results: 8 | {result} 9 | 10 | Human Natural Query: 11 | {query} 12 | 13 | Explanation: -------------------------------------------------------------------------------- /prompts/query_classification.prompt: -------------------------------------------------------------------------------- 1 | Input: "{input_text}" 2 | Classify: Natural Language Query or Not a Natural Language Query? 3 | -------------------------------------------------------------------------------- /prompts/query_expansion.prompt: -------------------------------------------------------------------------------- 1 | Given a database schema and a user query, suggest the most relevant column names and any possible cell values that would be helpful for answering the query. 2 | Please provide both column names and possible cell values as JSON arrays. Only response in JSON with the proper backticks. The JSON response should be a dictionary with two keys, "columns", and "cell_values". Each of these values should be an array of strings. For example, the following is a valid query! No commentary is needed. 3 | 4 | Schema: {schema} 5 | 6 | User Query: What is Sarah Fienman's hourly rate? 7 | Response: ```json {{"columns": ["hourly_rate", "name"], "cell_values": ["Sarah Fineman"]}} ``` 8 | 9 | User Query: What is the average hourly rate? 10 | Response: ```json {{"columns": ["hourly_rate"], "cell_values": []}} ``` 11 | 12 | User Query: {user_query} 13 | Response: -------------------------------------------------------------------------------- /prompts/query_healing.prompt: -------------------------------------------------------------------------------- 1 | Your task is to fix the following SQLite3 SQL query given the natural language query, original SQL query and error. 2 | Adhere to these rules: 3 | - **Deliberately go through the question and database schema word by word** to appropriately answer the question 4 | - **Use Table Aliases** to prevent ambiguity. 
For example, `SELECT table1.col1, table2.col1 FROM table1 JOIN table2 ON table1.id = table2.id`. 5 | - **Use Column Aliases** to provide clear indications of result columns 6 | - When creating a ratio, always cast the numerator as float 7 | - Output only sql queries that are syntactically correct and execute without error in SQLite3. 8 | - Do not share any commentary 9 | 10 | 11 | Original Natural Language Query: 12 | ``` 13 | {prompt} 14 | ``` 15 | 16 | The following SQLlite3 SQL query failed to execute: 17 | 18 | Query: 19 | ```sql 20 | {original_query} 21 | ``` 22 | Error: 23 | ``` 24 | {error_message} 25 | ``` 26 | 27 | Based on the schema: 28 | ```sql 29 | {schema} 30 | ``` 31 | 32 | Please provide a corrected SQL query for SQLlite3. M 33 | 34 | Correct Query: -------------------------------------------------------------------------------- /prompts/sql_generation.prompt: -------------------------------------------------------------------------------- 1 | Your task is to convert a question into a SQL query in sqlite3, given a SQLite3 atabase schema. 2 | Adhere to these rules: 3 | - **Deliberately go through the question and database schema word by word** to appropriately answer the question 4 | - **Use Table Aliases** to prevent ambiguity. For example, `SELECT table1.col1, table2.col1 FROM table1 JOIN table2 ON table1.id = table2.id`. 5 | - **Use Column Aliases** to provide clear indications of result columns 6 | - When creating a ratio, always cast the numerator as float 7 | - Output only sql queries that are syntactically correct and execute without error in SQLite3. 8 | - **Use ilike aliases for text fields** to improve search s. For example, `SELECT table1.col1 from table1 where col1 ilike '%search_term%'; 9 | - Try to show trends in a meanigful way rather then just a value. 10 | - Do not share any commentary 11 | - Show only one query or combine queries 12 | 13 | Generate a SQL query that answers the question `{user_query}`. 
14 | This query will run on a database whose schema is represented in this string: 15 | {schema} 16 | 17 | 18 | Only response with the proper backticks. 19 | 20 | Use the following relevant columns: hourly_rate, department_id, deptartment_name 21 | Use the following relevant cell values: 22 | Query: "What is the average hourly rate?" 23 | Response: ```sql SELECT AVG(hourly_rate) as "Averge Hourly Rate" FROM payroll" ``` 24 | 25 | 26 | Use the following relevant columns: {columns} 27 | Use the following relevant cell values: {cell_values} 28 | Query: "{user_query}" 29 | Response: -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "table-rag" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Kartik Thakore "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.10" 10 | openai = "^1.52.0" 11 | faker = "^30.8.0" 12 | sqlparse = "^0.5.1" 13 | onnxruntime = "^1.19.2" 14 | tabulate = "^0.9.0" 15 | 16 | 17 | [build-system] 18 | requires = ["poetry-core"] 19 | build-backend = "poetry.core.masonry.api" 20 | -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | 2 | import logging 3 | import asyncio 4 | import os 5 | from table_rag import TableRAG 6 | from openai import AsyncOpenAI # Assuming you're using OpenAI API for LLM 7 | from tabulate import tabulate 8 | 9 | LLM_API_SERVER = os.environ.get("LLM_API_SERVER", "http://localhost:11434/v1") 10 | LLM_API_KEY = os.environ.get("LLM_API_KEY", "ollama") 11 | LLM_MODEL = os.environ.get("LLM_MODEL", "mistral-nemo") 12 | 13 | logging.basicConfig(level=logging.INFO) 14 | 15 | 16 | 17 | if __name__ == "__main__": 18 | client = AsyncOpenAI(api_key=LLM_API_KEY, base_url=LLM_API_SERVER) 19 | 20 | db_path = 'bbq_manufacturing.db' 21 
| 22 | table_rag = TableRAG(db_path, client) 23 | 24 | async def run(): 25 | while True: 26 | try: 27 | prompt = input("Enter a natural language query: ") 28 | sql_query = await table_rag.generate_sql_query(prompt) 29 | 30 | 31 | result_tuple = await table_rag.execute_sql_query(prompt, sql_query) 32 | results, columns = result_tuple 33 | if results: 34 | result = tabulate(results, headers=columns, tablefmt="grid") 35 | table_rag.add_message({"role":"assistant", "content": result}) 36 | # Use tabulate to print the table 37 | print(result) 38 | 39 | explanation = await table_rag.explain_result(result, prompt) 40 | 41 | # dig deeper 42 | try: 43 | dig_deeper_sql = await table_rag.dig_deeper(sql_query, result, prompt, explanation) 44 | 45 | result_tuple = await table_rag.execute_sql_query(prompt, dig_deeper_sql) 46 | results, columns = result_tuple 47 | 48 | explanation = await table_rag.explain_result(results, prompt) 49 | 50 | except Exception as e: 51 | logging.error(f"Error in dig deeper: {e}") 52 | 53 | print("Explanation:\n", explanation) 54 | 55 | 56 | print("\n") 57 | except ValueError as e: 58 | print(f"Error: {e}") 59 | 60 | asyncio.run(run()) 61 | -------------------------------------------------------------------------------- /scripts/generate_database.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | from datetime import timedelta, datetime 3 | import random 4 | from faker import Faker 5 | fake = Faker() 6 | # Define custom positions and departments relevant to the BBQ manufacturing company 7 | departments = [ 8 | ('Sales', 'Handles customer relations and product sales'), 9 | ('Human Resources', 'Manages employee relations, recruitment, and compliance'), 10 | ('Engineering', 'Designs and improves product features and manufacturing processes'), 11 | ('Marketing', 'Promotes products and manages brand reputation'), 12 | ('Production', 'Manufactures the products, manages quality control and assembly'), 
13 | ('Supply Chain', 'Handles procurement of raw materials, logistics, and inventory management') 14 | ] 15 | 16 | positions_by_department = { 17 | 'Sales': ['Sales Representative', 'Account Manager', 'Regional Sales Manager'], 18 | 'Human Resources': ['HR Manager', 'Recruitment Specialist', 'Compliance Officer'], 19 | 'Engineering': ['Mechanical Engineer', 'Product Designer', 'Quality Assurance Engineer'], 20 | 'Marketing': ['Marketing Coordinator', 'Social Media Manager', 'Product Marketing Manager'], 21 | 'Production': ['Production Line Worker', 'Assembly Technician', 'Production Manager'], 22 | 'Supply Chain': ['Procurement Specialist', 'Logistics Coordinator', 'Inventory Manager'] 23 | } 24 | 25 | products = [ 26 | ('BBQ Grill', 399.99, 200.00), 27 | ('BBQ Cover', 29.99, 10.00), 28 | ('BBQ Utensils', 19.99, 5.00), 29 | ('BBQ Sauce', 4.99, 2.00), 30 | ('Charcoal Bag', 14.99, 6.00), 31 | ('Grill Brush', 12.99, 4.00) 32 | ] 33 | 34 | # Create a connection to the database 35 | conn = sqlite3.connect('bbq_manufacturing.db') 36 | 37 | # Create a cursor object using the cursor() method 38 | cursor = conn.cursor() 39 | 40 | # Turn on foreign key support 41 | cursor.execute("PRAGMA foreign_keys = ON") 42 | 43 | # Create Department table 44 | cursor.execute(''' 45 | CREATE TABLE Department ( 46 | department_id INTEGER PRIMARY KEY, 47 | department_name TEXT, 48 | department_description TEXT 49 | );''') 50 | 51 | # Insert custom departments 52 | cursor.executemany("INSERT INTO Department (department_name, department_description) VALUES (?, ?)", departments) 53 | 54 | # Create Employees table 55 | cursor.execute(''' 56 | CREATE TABLE Employees ( 57 | employee_id INTEGER PRIMARY KEY, 58 | first_name TEXT, 59 | last_name TEXT, 60 | department_id INTEGER, 61 | position TEXT, 62 | salary REAL, 63 | hire_date DATE, 64 | termination_date DATE, 65 | FOREIGN KEY (department_id) REFERENCES Department(department_id) 66 | );''') 67 | 68 | # Generate realistic employee data using LLM 
insights 69 | def generate_employees(num_employees): 70 | employee_data = [] 71 | for _ in range(num_employees): 72 | # Randomize department and position 73 | department_id = random.randint(1, len(departments)) 74 | department_name = departments[department_id - 1][0] 75 | position = random.choice(positions_by_department[department_name]) 76 | 77 | # Generate realistic salary based on department and position 78 | salary = random.uniform(35000, 120000) if department_name != 'Production' else random.uniform(30000, 70000) 79 | 80 | hire_date = datetime.now() - timedelta(days=random.randint(100, 3650)) # Random hire date within 10 years 81 | termination_date = hire_date + timedelta(days=random.randint(365, 1825)) if random.random() < 0.3 else None 82 | # use faker for names 83 | 84 | first_name = fake.first_name() 85 | last_name = fake.last_name() 86 | 87 | employee_data.append((first_name, last_name, department_id, position, salary, hire_date, termination_date)) 88 | 89 | return employee_data 90 | 91 | # Insert employees into the database 92 | employees = generate_employees(50) 93 | cursor.executemany(''' 94 | INSERT INTO Employees (first_name, last_name, department_id, position, salary, hire_date, termination_date) 95 | VALUES (?, ?, ?, ?, ?, ?, ?) 
96 | ''', employees) 97 | 98 | # Create Products table 99 | cursor.execute(''' 100 | CREATE TABLE Products ( 101 | product_id INTEGER PRIMARY KEY, 102 | product_name TEXT, 103 | price REAL, 104 | cost REAL 105 | );''') 106 | 107 | # Insert custom products 108 | cursor.executemany("INSERT INTO Products (product_name, price, cost) VALUES (?, ?, ?)", products) 109 | 110 | # Create Sales table 111 | cursor.execute(''' 112 | CREATE TABLE Sales ( 113 | sales_id INTEGER PRIMARY KEY, 114 | employee_id INTEGER, 115 | product_id INTEGER, 116 | quantity INTEGER, 117 | sale_price REAL, 118 | sale_date DATE, 119 | FOREIGN KEY (employee_id) REFERENCES Employees(employee_id), 120 | FOREIGN KEY (product_id) REFERENCES Products(product_id) 121 | );''') 122 | 123 | # Generate realistic sales data 124 | def generate_sales(num_sales): 125 | sales_data = [] 126 | for _ in range(num_sales): 127 | employee_id = random.randint(1, len(employees)) 128 | product_id = random.randint(1, len(products)) 129 | quantity = random.randint(1, 20) 130 | sale_price = products[product_id - 1][1] * random.uniform(0.9, 1.1) # Slight variation in sales prices 131 | sale_date = datetime.now() - timedelta(days=random.randint(1, 365)) 132 | 133 | sales_data.append((employee_id, product_id, quantity, sale_price, sale_date)) 134 | 135 | return sales_data 136 | 137 | # Insert sales into the database 138 | sales = generate_sales(500) 139 | cursor.executemany(''' 140 | INSERT INTO Sales (employee_id, product_id, quantity, sale_price, sale_date) 141 | VALUES (?, ?, ?, ?, ?) 
142 | ''', sales) 143 | 144 | # Create ClockInClockOut table 145 | cursor.execute(''' 146 | CREATE TABLE ClockInClockOut ( 147 | clock_id INTEGER PRIMARY KEY, 148 | employee_id INTEGER, 149 | clock_in DATETIME, 150 | clock_out DATETIME, 151 | FOREIGN KEY (employee_id) REFERENCES Employees(employee_id) 152 | );''') 153 | 154 | # Generate clock-in/clock-out records 155 | def generate_clock_in_out(num_records): 156 | clock_data = [] 157 | for _ in range(num_records): 158 | employee_id = random.randint(1, len(employees)) 159 | clock_in = datetime.now() - timedelta(days=random.randint(1, 30), hours=random.randint(8, 9)) 160 | clock_out = clock_in + timedelta(hours=random.uniform(7, 9)) # Shift duration between 7-9 hours 161 | 162 | clock_data.append((employee_id, clock_in, clock_out)) 163 | 164 | return clock_data 165 | 166 | # Insert clock-in/out records into the database 167 | clock_in_out = generate_clock_in_out(1000) 168 | cursor.executemany(''' 169 | INSERT INTO ClockInClockOut (employee_id, clock_in, clock_out) 170 | VALUES (?, ?, ?) 
171 | ''', clock_in_out) 172 | 173 | # Create Payroll table 174 | cursor.execute(''' 175 | CREATE TABLE Payroll ( 176 | payroll_id INTEGER PRIMARY KEY, 177 | employee_id INTEGER, 178 | pay_period_start DATE, 179 | pay_period_end DATE, 180 | hours_worked REAL, 181 | gross_pay REAL, 182 | deductions REAL, 183 | net_pay REAL, 184 | FOREIGN KEY (employee_id) REFERENCES Employees(employee_id) 185 | );''') 186 | 187 | # Generate realistic payroll data 188 | def generate_payroll(num_records): 189 | payroll_data = [] 190 | for _ in range(num_records): 191 | employee_id = random.randint(1, len(employees)) 192 | pay_period_start = datetime.now() - timedelta(days=random.randint(1, 30)) 193 | pay_period_end = pay_period_start + timedelta(days=14) # 2-week pay period 194 | hours_worked = random.uniform(70, 80) # Full-time hours 195 | gross_pay = hours_worked * random.uniform(20, 60) # Based on an hourly rate 196 | deductions = gross_pay * random.uniform(0.1, 0.25) # Deductions between 10-25% 197 | net_pay = gross_pay - deductions 198 | 199 | payroll_data.append((employee_id, pay_period_start, pay_period_end, hours_worked, gross_pay, deductions, net_pay)) 200 | 201 | return payroll_data 202 | 203 | # Insert payroll data into the database 204 | payroll = generate_payroll(1000) 205 | cursor.executemany(''' 206 | INSERT INTO Payroll (employee_id, pay_period_start, pay_period_end, hours_worked, gross_pay, deductions, net_pay) 207 | VALUES (?, ?, ?, ?, ?, ?, ?) 
208 | ''', payroll) 209 | 210 | # Commit the changes and close the connection 211 | conn.commit() 212 | conn.close() 213 | -------------------------------------------------------------------------------- /table_rag/__init__.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | import json 3 | import logging 4 | import os 5 | # Assuming you're using OpenAI API for LLM 6 | import json_repair 7 | import re 8 | 9 | LLM_API_SERVER = os.environ.get("LLM_API_SERVER", "http://localhost:11434/v1") 10 | LLM_API_KEY = os.environ.get("LLM_API_KEY", "ollama") 11 | LLM_MODEL = os.environ.get("LLM_MODEL", "mistral-nemo") 12 | 13 | logging.basicConfig(level=logging.INFO) 14 | 15 | logging.debug(f"Using LLM API Server: {LLM_API_SERVER}, model: {LLM_MODEL}") 16 | 17 | # Helper function to load prompt templates 18 | 19 | 20 | def load_prompt_template(file_path): 21 | with open(file_path, 'r') as file: 22 | return file.read() 23 | 24 | 25 | class TableRAG: 26 | def __init__(self, db_path, llm_client, cell_encoding_budget=1000, retry_execute=3): 27 | self.db_path = db_path 28 | self.llm_client = llm_client 29 | self.cell_encoding_budget = cell_encoding_budget 30 | self.retry_execute = retry_execute 31 | 32 | self.history_message = [] 33 | 34 | schema_keys = self.schema_retrieval() 35 | self.schema = schema_keys[0] 36 | self.foreign_keys = schema_keys[1] 37 | self.cell_database = self.build_cell_db() 38 | # Load prompt templates 39 | self.query_expansion_prompt_template = load_prompt_template( 40 | 'prompts/query_expansion.prompt') 41 | self.sql_generation_prompt_template = load_prompt_template( 42 | 'prompts/sql_generation.prompt') 43 | self.query_classification_prompt_template = load_prompt_template( 44 | 'prompts/query_classification.prompt') 45 | self.query_healing_prompt_template = load_prompt_template( 46 | 'prompts/query_healing.prompt') # For query healing 47 | self.explain_result_prompt_template = load_prompt_template( 48 | 
'prompts/explain_result.prompt') # For result explanation 49 | self.dig_deeper_prompt_template = load_prompt_template( 50 | 'prompts/dig_deeper.prompt') 51 | 52 | def add_message(self, message): 53 | self.history_message.append(message) 54 | 55 | def schema_retrieval(self, max_sample_length=100): 56 | logging.debug("Doing schema Retrieval") 57 | schema = {} 58 | foreign_keys = {} 59 | 60 | try: 61 | conn = sqlite3.connect(self.db_path) 62 | cursor = conn.cursor() 63 | 64 | cursor.execute( 65 | "SELECT name FROM sqlite_master WHERE type='table';") 66 | tables = cursor.fetchall() 67 | 68 | for table in tables: 69 | table_name = table[0] 70 | cursor.execute(f"PRAGMA table_info({table_name});") 71 | columns = cursor.fetchall() 72 | 73 | schema[table_name] = { 74 | "columns": [ 75 | { 76 | "name": column[1], 77 | "type": column[2], 78 | "sample": None 79 | } for column in columns 80 | ] 81 | } 82 | 83 | # Extract foreign keys 84 | cursor.execute(f"PRAGMA foreign_key_list({table_name});") 85 | foreign_key_info = cursor.fetchall() 86 | 87 | foreign_keys[table_name] = [ 88 | { 89 | "from": fk[3], # local column 90 | "to_table": fk[2], # referenced table 91 | "to": fk[4] # referenced column 92 | } 93 | for fk in foreign_key_info 94 | ] 95 | 96 | # Get sample data 97 | cursor.execute(f"SELECT * FROM {table_name} LIMIT 1;") 98 | sample_row = cursor.fetchone() 99 | 100 | if sample_row: 101 | for idx, column in enumerate(schema[table_name]["columns"]): 102 | sample_value = sample_row[idx] 103 | if isinstance(sample_value, str) and len(sample_value) > max_sample_length: 104 | column["sample"] = sample_value[:max_sample_length] + "..." 
105 | else: 106 | column["sample"] = sample_value 107 | 108 | conn.close() 109 | 110 | except sqlite3.Error as e: 111 | logging.error(f"Failed to retrieve database schema: {e}") 112 | 113 | return schema, foreign_keys 114 | 115 | def schema_to_create_statements(self): 116 | create_statements = [] 117 | 118 | # Retrieve schema and foreign key information 119 | schema, foreign_keys = self.schema_retrieval() 120 | 121 | for table_name, table_data in schema.items(): 122 | columns = table_data["columns"] 123 | column_definitions = [] 124 | 125 | for column in columns: 126 | column_definitions.append(f"{column['name']} {column['type']}") 127 | 128 | # Create the table definition statement 129 | create_statement = f"CREATE TABLE {table_name} ({', '.join(column_definitions)});" 130 | create_statements.append(create_statement) 131 | 132 | # Add samples as comments 133 | for column in columns: 134 | if column["sample"]: 135 | create_statements.append( 136 | f"-- Sample: {column['name']} = {column['sample']}") 137 | 138 | # Add foreign key join hints as comments 139 | if table_name in foreign_keys: 140 | for fk in foreign_keys[table_name]: 141 | join_comment = f"-- {table_name}.{fk['from']} can be joined with {fk['to_table']}.{fk['to']}" 142 | create_statements.append(join_comment) 143 | 144 | # Optionally add inferred join hints based on naming convention (_id pattern) 145 | for column in columns: 146 | if column['name'].endswith('_id'): 147 | # e.g., 'product_id' -> 'product' 148 | inferred_table = column['name'][:-3] 149 | join_comment = f"-- {table_name}.{column['name']} might join with {inferred_table}.id" 150 | create_statements.append(join_comment) 151 | 152 | logging.debug("Schema to Create Statements: \n" + 153 | "\n".join(create_statements)) 154 | return "\n".join(create_statements) 155 | 156 | def build_cell_db(self): 157 | """ 158 | Builds a database of distinct column-value pairs for cell retrieval. 
159 | Only the most frequent/distinct values are kept, respecting the cell encoding budget. 160 | """ 161 | cell_db = {} 162 | try: 163 | conn = sqlite3.connect(self.db_path) 164 | cursor = conn.cursor() 165 | 166 | cursor.execute( 167 | "SELECT name FROM sqlite_master WHERE type='table';") 168 | tables = cursor.fetchall() 169 | 170 | for table in tables: 171 | table_name = table[0] 172 | cursor.execute( 173 | f"SELECT * FROM {table_name} LIMIT {self.cell_encoding_budget};") 174 | rows = cursor.fetchall() 175 | 176 | if rows: 177 | cell_db[table_name] = {} 178 | for row in rows: 179 | for idx, column in enumerate(row): 180 | column_name = self.schema[table_name]["columns"][idx]["name"] 181 | if column_name not in cell_db[table_name]: 182 | cell_db[table_name][column_name] = set() 183 | if len(cell_db[table_name][column_name]) < self.cell_encoding_budget: 184 | cell_db[table_name][column_name].add(column) 185 | 186 | conn.close() 187 | except sqlite3.Error as e: 188 | logging.error(f"Failed to build cell database: {e}") 189 | 190 | return cell_db 191 | 192 | def get_relevant_cells(self, table_name, columns, cell_values): 193 | """ 194 | Retrieve relevant cells from the cell database based on columns and values. 195 | """ 196 | relevant_cells = {} 197 | if table_name in self.cell_database: 198 | table_cells = self.cell_database[table_name] 199 | for column in columns: 200 | if column in table_cells: 201 | # Only return cell values that match or are within the given column 202 | relevant_cells[column] = list( 203 | table_cells[column].intersection(cell_values)) 204 | return relevant_cells 205 | 206 | async def tabular_query_expansion(self, prompt): 207 | """ 208 | Expands the query into smaller schema and cell-specific queries using external prompt template. 
209 | """ 210 | logging.debug("Doing Query Expansion") 211 | 212 | # Use the external query_expansion.prompt template 213 | query_expansion_prompt = self.query_expansion_prompt_template.format( 214 | schema=self.schema_to_create_statements(), 215 | user_query=prompt 216 | ) 217 | 218 | response = await self.llm_client.chat.completions.create( 219 | model=LLM_MODEL, 220 | messages=[ 221 | # history messages 222 | *self.history_message, 223 | {"role": "user", "content": query_expansion_prompt} 224 | ], 225 | stream=False 226 | # response_format=QueryExpansionResponse 227 | ) 228 | 229 | response_text = response.choices[0].message.content.strip() 230 | 231 | # Log full response for debugging 232 | logging.debug(f"Query Expansion Response: {response_text}") 233 | 234 | try: 235 | # Extract the JSON part of the response (assuming it’s wrapped in ```json blocks) 236 | json_text = re.search(r'```json(.*?)```', 237 | response_text, re.DOTALL).group(1).strip() 238 | logging.debug("Extracted JSON Data: " + json_text) 239 | expansion_data = json_repair.loads(json_text) 240 | 241 | # Use fallback for 'cell_values' in case it's missing 242 | columns = expansion_data.get("columns", []) 243 | cell_values = expansion_data.get( 244 | "cell_values", expansion_data.get("possible_cell_values", [])) 245 | 246 | # Log extracted columns and cell values for debugging 247 | logging.debug(f"Extracted Columns: {columns}") 248 | logging.debug(f"Extracted Cell Values: {cell_values}") 249 | 250 | return columns, cell_values 251 | 252 | except json.JSONDecodeError: 253 | raise ValueError("Failed to decode query expansion response") 254 | except KeyError as e: 255 | logging.error(f"Missing key in the expansion data: {e}") 256 | raise ValueError(f"Failed to extract necessary data: {str(e)}") 257 | 258 | async def generate_sql_query(self, natural_language_query): 259 | """ 260 | Generate SQL query from natural language input using query expansion and retrieval. 
261 | """ 262 | # Step 1: Expand the query 263 | columns, cell_values = await self.tabular_query_expansion(natural_language_query) 264 | 265 | # Step 2: Get relevant cells from the cell database 266 | relevant_cells = {} 267 | for table_name in self.cell_database: 268 | relevant_cells[table_name] = self.get_relevant_cells( 269 | table_name, columns, cell_values) 270 | 271 | # Step 3: Use the relevant cells for query generation 272 | sql_prompt = self.sql_generation_prompt_template.format( 273 | schema=self.schema_to_create_statements(), 274 | user_query=natural_language_query, 275 | columns=columns, 276 | cell_values=json.dumps(relevant_cells, indent=4) 277 | ) 278 | 279 | logging.info(sql_prompt) 280 | 281 | response = await self.llm_client.chat.completions.create( 282 | model=LLM_MODEL, 283 | messages=[ 284 | *self.history_message, 285 | {"role": "user", "content": sql_prompt} 286 | ], 287 | stream=False 288 | ) 289 | 290 | sql_query = response.choices[0].message.content.strip() 291 | logging.debug("Generated SQL Query: " + sql_query) 292 | 293 | sql_query = re.search(r'```sql(.*?)```', sql_query, 294 | re.DOTALL).group(1).strip() 295 | 296 | logging.debug("Extracted SQL Query: " + sql_query) 297 | 298 | # Parse and refine SQL query 299 | return sql_query 300 | 301 | async def is_natural_language_query(self, input_text): 302 | """ 303 | Determine if the input is a natural language query. 
304 | """ 305 | # Use the external query_classification.prompt template 306 | classification_prompt = self.query_classification_prompt_template.format( 307 | input_text=input_text) 308 | 309 | response = await self.llm_client.chat.completions.create( 310 | model=LLM_MODEL, 311 | messages=[{"role": "user", "content": classification_prompt}], 312 | stream=False 313 | ) 314 | 315 | return response.choices[0].message.content.strip() == "Natural Language Query" 316 | 317 | async def execute_sql_query(self, prompt, sql_query): 318 | """ 319 | Executes an SQL query and retries up to self.retry_execute times if errors occur. 320 | Uses the LLM to try and fix the query. 321 | """ 322 | attempt = 0 323 | last_error = None 324 | 325 | while attempt < self.retry_execute: 326 | try: 327 | logging.debug( 328 | f"Executing SQL query (Attempt {attempt + 1}/{self.retry_execute}): {sql_query}") 329 | conn = sqlite3.connect(self.db_path) 330 | cursor = conn.cursor() 331 | # Ensure sql_query is a string, not coroutine 332 | cursor.execute(sql_query) 333 | results = cursor.fetchall() 334 | columns = [description[0] 335 | for description in cursor.description] 336 | conn.close() 337 | return results, columns # Successful execution 338 | except sqlite3.Error as e: 339 | last_error = str(e) 340 | logging.error( 341 | f"Failed to execute query (Attempt {attempt + 1}): {e}") 342 | 343 | # Send the error and original query to the LLM for healing, and await it 344 | # Await the coroutine 345 | sql_query = await self.heal_sql_query(prompt, sql_query, last_error) 346 | 347 | # If the LLM did not provide a valid correction, break the loop 348 | if not sql_query: 349 | logging.error( 350 | "No valid correction from LLM. 
Stopping retry attempts.") 351 | break 352 | 353 | attempt += 1 354 | 355 | # If all retries failed, return the last error encountered 356 | logging.error( 357 | f"Failed to execute the query after {self.retry_execute} attempts.") 358 | return None, None 359 | 360 | async def heal_sql_query(self, prompt, failed_query, error_message): 361 | """ 362 | Sends the failed SQL query and error message to the LLM, asking for a correction. 363 | """ 364 | try: 365 | # Prepare the prompt using the healing prompt template 366 | healing_prompt = self.query_healing_prompt_template.format( 367 | prompt=prompt, 368 | original_query=failed_query, 369 | error_message=error_message, 370 | schema=self.schema_to_create_statements() 371 | ) 372 | 373 | logging.debug( 374 | f"Sending query healing prompt to LLM: {healing_prompt}") 375 | 376 | # Send the prompt to the LLM to generate a corrected SQL query 377 | response = await self.llm_client.chat.completions.create( 378 | model=LLM_MODEL, 379 | messages=[ 380 | *self.history_message, 381 | {"role": "user", "content": healing_prompt} 382 | ], 383 | stream=False 384 | ) 385 | 386 | # Extract the corrected SQL query from the LLM response 387 | corrected_query = response.choices[0].message.content.strip() 388 | 389 | logging.debug(f"Received corrected SQL query: {corrected_query}") 390 | 391 | # Extract SQL from code block (if it's inside a code block) 392 | corrected_query = re.search( 393 | r'```sql(.*?)```', corrected_query, re.DOTALL) 394 | if corrected_query: 395 | return corrected_query.group(1).strip() 396 | 397 | return corrected_query # Return the corrected query 398 | except Exception as e: 399 | logging.error(f"Failed to heal SQL query: {e}") 400 | return None # Return None if healing fails 401 | 402 | async def explain_result(self, result, prompt): 403 | """ 404 | Explains the result of a query using the LLM. 
405 | """ 406 | # Prepare the prompt using the explain_result prompt template 407 | explain_prompt = self.explain_result_prompt_template.format( 408 | query=prompt, 409 | result=result 410 | ) 411 | 412 | logging.debug( 413 | f"Sending explain result prompt to LLM: {explain_prompt}") 414 | 415 | # Send the prompt to the LLM to generate an explanation 416 | response = await self.llm_client.chat.completions.create( 417 | model=LLM_MODEL, 418 | messages=[ 419 | *self.history_message, 420 | {"role": "user", "content": explain_prompt}], 421 | stream=False 422 | ) 423 | 424 | explanation = response.choices[0].message.content.strip() 425 | 426 | logging.debug(f"Received explanation: {explanation}") 427 | 428 | return explanation 429 | 430 | async def dig_deeper(self, previous_sql, previous_result, prompt, explaination): 431 | """ 432 | Dig deeper into the result of a query using the LLM. 433 | """ 434 | # Prepare the prompt using the dig_deeper prompt template 435 | dig_deeper_prompt = self.dig_deeper_prompt_template.format( 436 | previous_sql=previous_sql, 437 | schema=self.schema_to_create_statements(), 438 | previous_result=previous_result, 439 | user_query=prompt, 440 | explaination=explaination 441 | ) 442 | 443 | logging.debug(f"Sending dig deeper prompt to LLM: {dig_deeper_prompt}") 444 | 445 | # Send the prompt to the LLM to generate a deeper analysis 446 | response = await self.llm_client.chat.completions.create( 447 | model=LLM_MODEL, 448 | messages=[*self.history_message, 449 | {"role": "user", "content": dig_deeper_prompt}], 450 | stream=False 451 | ) 452 | 453 | sql_query = response.choices[0].message.content.strip() 454 | logging.debug("Generated SQL Query: " + sql_query) 455 | 456 | sql_query = re.search(r'```sql(.*?)```', sql_query, 457 | re.DOTALL).group(1).strip() 458 | 459 | logging.debug("Extracted SQL Query: " + sql_query) 460 | 461 | # Parse and refine SQL query 462 | return sql_query 463 | 
--------------------------------------------------------------------------------