├── database2prompt
├── __init__.py
├── json_generator
│ ├── __init__.py
│ └── json_generator.py
├── database
│ ├── core
│ │ ├── metadata.py
│ │ ├── database_config.py
│ │ ├── database_strategy.py
│ │ ├── database_factory.py
│ │ └── database_params.py
│ ├── pgsql
│ │ └── postgresql_strategy.py
│ └── processing
│ │ └── database_processor.py
├── main.py
└── markdown
│ └── markdown_generator.py
├── .gitignore
├── .env.example
├── docker-compose.yml
├── .github
├── docs
│ └── pull_request_template.md
└── workflows
│ ├── code-buddy-review.yaml
│ └── pypi-publish.yaml
├── pyproject.toml
├── tests
├── markdown
│ └── test_markdown_generator.py
└── database
│ └── processing
│ └── test_database_processor.py
├── LICENSE
├── README.md
└── poetry.lock
/database2prompt/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/database2prompt/json_generator/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 |
3 | .venv
4 | __pycache__
5 |
6 | dist
7 | .env
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | # Database configuration
2 | DB_HOST=localhost
3 | DB_PORT=5432
4 | DB_USER=postgres
5 | DB_PASSWORD=postgres
6 | DB_NAME=postgres
7 | DB_SCHEMA=public
8 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 | services:
3 | postgres:
4 | image: postgres
5 | network_mode: host
6 | environment:
7 | POSTGRES_USER: ${DB_USER}
8 | POSTGRES_PASSWORD: ${DB_PASSWORD}
9 | POSTGRES_DB: ${DB_NAME}
10 | volumes:
11 | - postgres-data:/var/lib/postgresql/data
12 |
13 | volumes:
14 | postgres-data:
15 | external: false
16 |
--------------------------------------------------------------------------------
/.github/docs/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## 📌 Summary
2 |
3 | Describe the purpose of this PR.
4 |
5 | ## ✅ Changes
6 |
7 | - [ ] Added feature X
8 | - [ ] Fixed bug Y
9 | - [ ] Refactored Z
10 |
11 | ## 🧪 How to Test
12 |
13 | Explain how reviewers can test this change.
14 |
15 | ## 🔗 Related Issues
16 |
17 | Closes #123
18 | Relates to #456
19 |
20 | ## 📷 Screenshots/Videos (if applicable)
21 |
22 | Add screenshots or videos of you testing the changes, if it is applicable.
23 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "database2prompt"
3 | version = "0.2.0"
4 | description = "Extract relevant metadata from databases and transform it into context for Retrieval-Augmented Generation (RAG) in generative AI applications."
5 | authors = [
6 | {name = "jose.neto",email = "jose.neto-ext@anbima.com.br"}
7 | ]
8 | readme = "README.md"
9 | requires-python = ">=3.12"
10 | dependencies = [
11 | "sqlalchemy (>=2.0.37,<3.0.0)",
12 | "psycopg2-binary (>=2.9.10,<3.0.0)",
13 | "pytest (>=8.3.4,<9.0.0)",
14 | "python-dotenv (>=1.1.0,<2.0.0)"
15 | ]
16 |
17 | [build-system]
18 | requires = ["poetry-core>=2.0.0,<3.0.0"]
19 | build-backend = "poetry.core.masonry.api"
20 |
--------------------------------------------------------------------------------
/tests/markdown/test_markdown_generator.py:
--------------------------------------------------------------------------------
1 | # from database2prompt.markdown.markdown_generator import MarkdownGenerator
2 |
3 | # def test_should_generate_database_table_of_contents():
4 | # generator = MarkdownGenerator({
5 | # "tables": {
6 | # "table_a": {},
7 | # "table_b": {},
8 | # "table_c": {}
9 | # },
10 | # "views": {}
11 | # })
12 | # markdown = generator.generate()
13 |
14 | # lines = markdown.splitlines()
15 | # assert len(lines) == 13
16 | # assert lines[0] == "# Table of contents"
17 | # assert lines[1] == "- table_a"
18 | # assert lines[2] == "- table_b"
19 | # assert lines[3] == "- table_c"
20 |
--------------------------------------------------------------------------------
/.github/workflows/code-buddy-review.yaml:
--------------------------------------------------------------------------------
# Runs an automated AI code review on every pull request open/update.
name: CodeBuddy CodeReview
on:
  pull_request:
    types: [ opened, synchronize ]

jobs:
  ai_agent_code_review:
    runs-on: ubuntu-latest
    name: Automatic code review
    steps:
      - name: CodeBuddy
        id: code_review
        uses: code-buddy-agent/code-buddy@v1.0.0
        with:
          owner: "${{ github.repository_owner }}"
          repository: database2prompt
          pull_request_number: "${{ github.event.pull_request.number }}"
          # GH_TOKEN and CODE_BUDDY_KEY must be configured as repo secrets.
          github_token: "${{ secrets.GH_TOKEN }}"
          code_buddy_key: "${{ secrets.CODE_BUDDY_KEY }}"
          stack: "python"
          # Limit the bot to a single review comment per run.
          total_comments: 1
--------------------------------------------------------------------------------
/.github/workflows/pypi-publish.yaml:
--------------------------------------------------------------------------------
name: Publish database2prompt to PyPI

on:
  push:
    branches:
      - 'release/**'

jobs:
  build-and-publish:
    name: Build and publish to PyPI
    runs-on: ubuntu-latest
    environment: release
    permissions:
      # Required for PyPI Trusted Publishing (OIDC token exchange).
      id-token: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Keep in sync with `requires-python = ">=3.12"` in pyproject.toml;
          # the previous value (3.10) did not satisfy the project's own
          # Python constraint.
          python-version: '3.12'
      - name: Install poetry
        run: pip install poetry
      - name: Build project
        run: poetry build
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
--------------------------------------------------------------------------------
/database2prompt/database/core/metadata.py:
--------------------------------------------------------------------------------
from dataclasses import dataclass
from typing import Dict, Optional, List

@dataclass
class ColumnMetadata:
    """Metadata for a database column.

    Only `description` is required; every other field is optional
    documentation enrichment.
    """
    description: str
    business_rules: Optional[str] = None
    value_examples: Optional[List[str]] = None
    constraints: Optional[str] = None
    data_type_info: Optional[str] = None
    tags: Optional[List[str]] = None

@dataclass
class TableMetadata:
    """Metadata for a database table.

    `columns` maps column name -> ColumnMetadata. Callers may omit it or
    pass None explicitly; both are normalized to a fresh empty dict.
    """
    description: str
    domain: Optional[str] = None
    update_frequency: Optional[str] = None
    owner: Optional[str] = None
    tags: Optional[List[str]] = None
    # Annotated Optional because the default really is None (the previous
    # plain Dict annotation was type-incorrect). None is used as a sentinel
    # instead of a mutable {} default, which dataclasses forbid anyway.
    columns: Optional[Dict[str, ColumnMetadata]] = None

    def __post_init__(self) -> None:
        # Normalize the None sentinel into a per-instance dict so instances
        # never share mutable state.
        if self.columns is None:
            self.columns = {}
--------------------------------------------------------------------------------
/database2prompt/database/core/database_config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from dataclasses import dataclass
3 | from dotenv import load_dotenv
4 |
@dataclass
class DatabaseConfig:
    """Connection settings for a database, defaulting to a local PostgreSQL."""

    host: str = "localhost"
    port: int = 5432
    user: str = "postgres"
    password: str = "postgres"
    database: str = "postgres"
    schema: str = "public"

    @staticmethod
    def from_env() -> "DatabaseConfig":
        """Build a config from DB_* environment variables (loads .env first)."""
        load_dotenv()

        env = os.getenv
        return DatabaseConfig(
            host=env("DB_HOST", "localhost"),
            port=int(env("DB_PORT", "5432")),
            user=env("DB_USER", "postgres"),
            password=env("DB_PASSWORD", "postgres"),
            database=env("DB_NAME", "postgres"),
            schema=env("DB_SCHEMA", "public"),
        )
--------------------------------------------------------------------------------
/database2prompt/database/core/database_strategy.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from sqlalchemy import Table
3 | from typing import List, Dict
4 |
class DatabaseStrategy(ABC):
    """Abstract interface a database backend must implement.

    Concrete strategies (e.g. PostgreSQLStrategy) supply connection handling
    and the schema-introspection primitives used by DatabaseProcessor.
    """

    @abstractmethod
    def connection(self):
        """Yield an open database session; the caller drives the generator."""
        pass

    @abstractmethod
    def list_schemas(self) -> List[str]:
        """Return the names of the user schemas to document."""
        pass

    @abstractmethod
    def list_tables(self, schema_name: str):
        """Return the table names belonging to *schema_name*."""
        pass

    @abstractmethod
    def estimated_rows(self, tables_name: str):
        """Return estimated row counts for the given tables.

        NOTE(review): the PostgreSQL implementation receives a list of names
        and returns a dict name -> count; the `str` annotation here looks
        stale — confirm and align.
        """
        pass

    @abstractmethod
    def table_object(self, table: str, schema: str) -> Table:
        """Return a reflected SQLAlchemy Table for *table* in *schema*."""
        pass

    @abstractmethod
    def list_views(self) -> List[Dict[str, str]]:
        """Return view descriptors as dicts with "schema", "name" and "ddl" keys."""
        pass

    @abstractmethod
    def create_materialized_view(self, query):
        """Execute *query* (DDL) against the database."""
        pass

    @abstractmethod
    def get_table_sample(self, table: str, schema: str, limit: int = 3) -> List[Dict]:
        """Return up to *limit* sample rows from *table* as dicts."""
        pass
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Orla
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/database2prompt/database/core/database_factory.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 | from database2prompt.database.core.database_strategy import DatabaseStrategy
3 | from database2prompt.database.core.database_config import DatabaseConfig
4 | from database2prompt.database.pgsql.postgresql_strategy import PostgreSQLStrategy
5 |
class DatabaseFactory:
    """Creates DatabaseStrategy instances from a database type name."""

    # Registry of supported backends; add new engines here.
    strategies = {
        "pgsql": PostgreSQLStrategy
    }

    @staticmethod
    def run(
        db: str,
        config: Optional[DatabaseConfig] = None
    ) -> DatabaseStrategy:
        """Create a database strategy instance

        Args:
            db (str): Database type (e.g. 'pgsql')
            config (Optional[DatabaseConfig], optional): Database configuration.
                If None, will load from environment variables. Defaults to None.

        Returns:
            DatabaseStrategy: Database strategy instance

        Raises:
            ValueError: If database type is not supported
        """
        strategy_class = DatabaseFactory.strategies.get(db)
        if strategy_class is None:
            raise ValueError(f"Database '{db}' not implemented yet")

        effective_config = DatabaseConfig.from_env() if config is None else config
        return strategy_class(effective_config)
37 |
--------------------------------------------------------------------------------
/database2prompt/database/core/database_params.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List, Set
2 |
3 |
class DatabaseParams:
    """User-supplied selection of which tables to document and their contexts."""

    def __init__(self):
        # Stored under a private name: an instance attribute literally named
        # `table_contexts` would shadow the method of the same name below and
        # make it uncallable ("'dict' object is not callable") — that was the
        # previous, broken behavior.
        self._table_contexts: Dict[str, str] = {}
        self._tables: List[str] = []
        self._ignored_tables: Set[str] = set()

    def table_contexts(self, contexts: Dict[str, str]) -> None:
        """Define table contexts

        Args:
            contexts (Dict[str, str]): Dictionary mapping table names to their contexts
        """
        self._table_contexts = contexts

    def tables(self, tables: List[str]) -> None:
        """Define tables to be documented

        Args:
            tables (List[str]): List of table names to document
        """
        self._tables = tables

    def ignore_tables(self, tables: List[str]) -> None:
        """Define tables to be ignored in documentation

        Args:
            tables (List[str]): List of table names to ignore
        """
        self._ignored_tables.update(tables)

    def should_document_table(self, table_name: str) -> bool:
        """Check if a table should be documented

        Ignored tables always lose; an empty allow-list means "document all".

        Args:
            table_name (str): Name of the table to check

        Returns:
            bool: True if the table should be documented, False otherwise
        """
        if table_name in self._ignored_tables:
            return False
        if not self._tables:
            return True
        return table_name in self._tables
--------------------------------------------------------------------------------
/database2prompt/main.py:
--------------------------------------------------------------------------------
1 | from database2prompt.database.core.database_factory import DatabaseFactory
2 | from database2prompt.database.core.database_params import DatabaseParams
3 | from database2prompt.database.core.database_config import DatabaseConfig
4 | from database2prompt.database.processing.database_processor import DatabaseProcessor
5 | import json
6 | from database2prompt.json_generator.json_generator import DatabaseJSONEncoder
7 |
8 |
9 |
def main():
    """Generate Markdown and JSON documentation for the configured database."""
    # from_env is a static factory: call it on the class — the original built
    # a default instance just to discard it (`DatabaseConfig().from_env()`).
    config = DatabaseConfig.from_env()

    strategy = DatabaseFactory.run("pgsql", config)
    next(strategy.connection())  # advance the generator to open the session
    print("Connected to the database!")

    # Tables to document explicitly:
    # tables_to_discovery = ["table_1", "table_2", "table_3"]

    # Tables to ignore:
    # tables_to_ignore = ["operacional.xx"]

    params = DatabaseParams()
    # params.tables(tables_to_discovery)
    # params.ignore_tables(tables_to_ignore)  # skip these tables in the docs

    database_processor = DatabaseProcessor(strategy, params)

    # Generate Markdown
    markdown_content = database_processor.database_to_prompt(output_format="markdown")
    with open("summary-database.md", "w") as file:
        file.write(markdown_content)
    print("Markdown file generated: summary-database.md")

    # Generate JSON (database_to_prompt returns a dict for this format;
    # DatabaseJSONEncoder stringifies driver-specific sample values).
    json_content = database_processor.database_to_prompt(output_format="json")
    with open("summary-database.json", "w", encoding="utf-8") as file:
        json.dump(json_content, file, indent=2, ensure_ascii=False, cls=DatabaseJSONEncoder)
    print("JSON file generated: summary-database.json")


if __name__ == "__main__":
    main()
44 |
--------------------------------------------------------------------------------
/database2prompt/json_generator/json_generator.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Any, Dict
3 |
class DatabaseJSONEncoder(json.JSONEncoder):
    """JSON encoder that falls back to str() for non-serializable objects."""

    def default(self, obj: Any) -> Any:
        # sample_data rows can carry arbitrary DB-driver types (dates,
        # Decimals, UUIDs, ...): render every unknown object through str().
        try:
            rendered = str(obj)
        except Exception:
            # Extremely defensive: a broken __str__ should not abort the dump.
            print(f"Error serializing object of type {type(obj)}: {obj}")
            return None
        return rendered
15 |
class JsonGenerator:
    """Turns processed database info into a JSON-serializable dict."""

    def __init__(self, database_info: dict):
        # Expected shape: {"tables": {...}, "views": {...}} as produced by
        # DatabaseProcessor.process_data().
        self.database_info = database_info

    def generate(self) -> Dict:
        """Generate a JSON structure from the database information

        Returns:
            Dict: JSON-compatible dictionary with database structure
        """
        tables = [
            self.__table_entry(table_name, table_info)
            for table_name, table_info in self.database_info["tables"].items()
        ]
        views = [
            {
                "view_name": view_name,
                "schema": view_info["schema"],
                "sql": view_info["ddl"],
            }
            for view_name, view_info in self.database_info["views"].items()
        ]
        return {"tables": tables, "views": views}

    def __table_entry(self, table_name: str, table_info: dict) -> dict:
        # One JSON entry per table; columns are flattened from the "fields" map.
        columns = [
            {
                "name": column_name,
                "type": self.format_type(column_info["type"]),
                "nullable": column_info["nullable"] == "NULL",
                "default": column_info["default"],
            }
            for column_name, column_info in table_info["fields"].items()
        ]
        return {
            "table_name": table_name,
            "schema": table_info["schema"],
            "estimated_rows": table_info["estimated_rows"],
            "columns": columns,
            "sample_data": table_info.get("sample_data", []),
        }

    def format_type(self, type: str) -> str:
        # "varchar(None)" is a length-less VARCHAR; collapse to plain "varchar".
        return "varchar" if "varchar(None)" in type else type
--------------------------------------------------------------------------------
/database2prompt/markdown/markdown_generator.py:
--------------------------------------------------------------------------------
class MarkdownGenerator:
    """Renders processed database info as a single Markdown document."""

    def __init__(self, processed_info: dict):
        # Expected shape: {"tables": {name: {...}}, "views": {name: {...}}}
        # as produced by DatabaseProcessor.process_data().
        self.processed_info = processed_info

    def generate(self):
        """Return the Markdown documentation string for tables and views."""

        tables = self.processed_info["tables"]

        # Table of contents: one bullet per (schema-qualified) table name.
        md_content = "# Table of contents\n"
        for table_key in tables.keys():
            md_content += f"- {table_key}\n"

        # One section per table with an approximate CREATE TABLE statement.
        for table_key in tables.keys():
            table_data = tables[table_key]
            md_content += "\n"
            md_content += f"## Table: {table_key}\n"
            md_content += f"- Estimated rows: {table_data["estimated_rows"]}\n"
            md_content += "\n"

            md_content += f"### Code\n\n"

            md_content += "```sql\n"
            # table_key is already "schema.table" when a schema is present.
            full_qualified_name = table_key if table_data["schema"] != None else table_data["name"]
            md_content += f"CREATE TABLE {full_qualified_name} (\n"

            for index, column_key in enumerate(table_data["fields"].keys()):
                column_data = table_data["fields"][column_key]

                column_type = self.format_type(column_data["type"])
                default = column_data["default"] if column_data["default"] != None else ""
                nullable = column_data["nullable"]

                # NOTE(review): every column line ends with a comma — including
                # the last one — so the emitted SQL is not strictly valid.
                md_content += f" {column_key} {column_type} {default} {nullable},\n"
            md_content += ");\n"
            md_content += "```\n"

            # Optional sample rows rendered as a Markdown table.
            if "sample_data" in table_data and table_data["sample_data"]:
                md_content += "\n### Sample Data\n\n"
                # NOTE(review): this fence is labelled sql but contains a
                # Markdown table — confirm the intended fence language.
                md_content += "```sql\n"

                # Header row taken from the first sample row's keys.
                headers = list(table_data["sample_data"][0].keys())
                md_content += "| " + " | ".join(headers) + " |\n"
                md_content += "| " + " | ".join(["---"] * len(headers)) + " |\n"

                for row in table_data["sample_data"]:
                    # Missing keys render as empty cells.
                    values = [str(row.get(header, "")) for header in headers]
                    md_content += "| " + " | ".join(values) + " |\n"
                md_content += "```\n"

        md_content += "\n"
        md_content += "# Views \n"

        views = self.processed_info["views"]

        # Views table of contents.
        for view_key in views.keys():
            md_content += f"- {view_key}\n"

        # One section per view with its DDL.
        for view_key in views.keys():
            view = views[view_key]

            md_content += "\n"
            md_content += f"## View: {view_key}\n"
            md_content += "\n"
            md_content += "### DDL\n"
            md_content += "```sql\n"
            md_content += f"{view["ddl"]}\n"
            md_content += "```\n"

        return md_content

    def format_type(self, type: str) -> str:
        # "varchar(None)" means a VARCHAR with no explicit length.
        if "varchar(None)" in type:
            return "varchar"

        return type
--------------------------------------------------------------------------------
/database2prompt/database/pgsql/postgresql_strategy.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import create_engine, inspect, text, MetaData, Table
2 | from sqlalchemy.orm import sessionmaker
3 | from typing import List, Dict
4 |
5 | from database2prompt.database.core.database_strategy import DatabaseStrategy
6 | from database2prompt.database.core.database_config import DatabaseConfig
7 |
8 |
class PostgreSQLStrategy(DatabaseStrategy):
    """PostgreSQL implementation of DatabaseStrategy built on SQLAlchemy."""

    def __init__(self, config: DatabaseConfig):
        self.config = config
        self.database_url = f"postgresql+psycopg2://{config.user}:{config.password}@{config.host}:{config.port}/{config.database}"
        self.engine = create_engine(self.database_url)
        # Reflect the configured schema eagerly so metadata is ready for use.
        self.metadata = MetaData(schema=config.schema)
        self.metadata.reflect(bind=self.engine)
        self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)

    def connection(self):
        """Yield an open session; it is closed when the generator finishes."""
        db = self.SessionLocal()
        try:
            yield db
        finally:
            db.close()

    def list_schemas(self):
        """Get all schemas of database (excluding PostgreSQL system schemas)."""
        inspector = inspect(self.engine)
        return filter(lambda s: s not in ["pg_catalog", "information_schema"], inspector.get_schema_names())

    def list_tables(self, schema_name):
        """Return all table names of database for the given schema."""
        inspector = inspect(self.engine)
        return inspector.get_table_names(schema_name)

    def estimated_rows(self, tables_name):
        """Map table name -> estimated row count from pg_class statistics.

        NOTE(review): relname is not schema-qualified, so same-named tables in
        different schemas collide in the result — confirm this is acceptable.
        """
        query = """
            SELECT relname AS table_name, reltuples::bigint AS estimated_rows
            FROM pg_class
            WHERE relname = ANY(:table_names)
        """

        with self.engine.connect() as connection:
            result = connection.execute(text(query), {"table_names": tables_name})
            return {row._mapping["table_name"]: row._mapping["estimated_rows"] for row in result}

    def table_object(self, table, schema):
        """Reflect and return a single Table without touching self.metadata."""
        metadata = MetaData()
        return Table(table, metadata, schema=schema, autoload_with=self.engine)

    def list_views(self):
        """Return user-defined views as dicts with schema, name and ddl keys."""
        query = """
            SELECT schemaname, viewname, definition
            FROM pg_views
            WHERE schemaname NOT IN ('pg_catalog', 'information_schema');
        """

        views = []
        with self.engine.connect() as connection:
            result = connection.execute(text(query))
            for row in result:
                # Fixed: removed a leftover debug print that dumped every
                # view definition to stdout on each call.
                views.append({"schema": row.schemaname, "name": row.viewname, "ddl": row.definition})

        return views

    def create_materialized_view(self, sql):
        """Execute the given DDL statement and commit the transaction."""
        with self.engine.connect() as connection:
            connection.execute(text(sql))
            connection.commit()

    def get_table_sample(self, table: str, schema: str, limit: int = 3) -> List[Dict]:
        """Return up to *limit* rows from schema.table as plain dicts.

        Identifiers cannot be bound parameters, so they are quoted through the
        dialect's identifier preparer (prevents injection/breakage from odd
        names); the row limit is passed as a bound parameter.
        """
        preparer = self.engine.dialect.identifier_preparer
        query = f"""
            SELECT *
            FROM {preparer.quote_schema(schema)}.{preparer.quote(table)}
            LIMIT :limit
        """

        with self.engine.connect() as connection:
            result = connection.execute(text(query), {"limit": limit})
            return [dict(row._mapping) for row in result]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # database2prompt
2 |
3 |
4 |
5 | 
6 |
7 |
8 |
9 | An open-source project designed to extract relevant data from databases and transform it into context for Retrieval-Augmented Generation (RAG) in generative AI applications.
10 |
11 | ## How is it useful?
12 |
database2prompt makes it easy to generate prompts for LLMs by reading your database and generating a Markdown file containing its schema. This provides context for the AI to maximize the effectiveness of your prompts.
14 |
15 |
16 | ## Databases Support (WIP)
17 |
18 | | Databases | Support |
19 | |--------------|---------|
20 | | PostgreSQL | ✅ |
21 |
22 | We will add support for most databases including analytical databases
23 |
24 |
25 | ## Output Formats
26 |
27 | | Output Format | Support |
28 | |--------------|---------|
29 | | JSON | ✅ |
30 | | Markdown | ✅ |
31 |
32 | ## Example Outputs
33 |
34 | You can find example outputs generated by database2prompt in the following files:
35 |
36 | - [summary-database.md](summary-database.md) - Example of markdown output
37 | - [summary-database.json](summary-database.json) - Example of JSON output
38 |
39 | ## Usage
40 |
41 | ### Installation
42 |
43 | ```bash
44 | pip install database2prompt
45 | ```
46 |
47 | ### Quick Start
48 |
49 | Here's a simple example of how to use database2prompt:
50 |
51 | ```python
52 | from database2prompt.database.core.database_config import DatabaseConfig
53 | from database2prompt.database.core.database_params import DatabaseParams
54 | from database2prompt.database.core.database_factory import DatabaseFactory
55 | from database2prompt.database.processing.database_processor import DatabaseProcessor
56 | from database2prompt.markdown.markdown_generator import MarkdownGenerator
57 |
58 | # 1. Configure database connection
59 | config = DatabaseConfig(
60 | host="localhost",
61 | port=5432,
62 | user="your_user",
63 | password="your_password",
64 | database="your_database",
65 | schema="your_schema"
66 | )
67 |
68 | # 2. Connect to database
69 | strategy = DatabaseFactory.run("pgsql", config)
70 | next(strategy.connection())
71 |
72 | # 3. Configure which tables to document
73 | params = DatabaseParams()
74 |
75 | # Option A: Document specific tables
76 | params.tables(["schema.table1", "schema.table2"])
77 |
78 | # Option B: Ignore specific tables
79 | params.ignore_tables(["schema.table_to_ignore"])
80 |
81 | # 4. Process database information
82 | database_processor = DatabaseProcessor(strategy, params)
83 |
84 | # 5. Generate content to prompt (markdown or json)
85 | content = database_processor.database_to_prompt(output_format="json")
86 |
87 | ```
88 |
89 | ### Configuration
90 |
91 | Configure the database connection:
92 |
93 | ```bash
94 | # .env file
95 | DB_HOST=localhost
96 | DB_PORT=5432
97 | DB_USER=postgres
98 | DB_PASSWORD=postgres
99 | DB_NAME=postgres
100 | DB_SCHEMA=public
101 | ```
102 |
103 | ```python
104 | config = DatabaseConfig.from_env()
105 | ```
106 |
107 | ## Contributing
108 |
109 | ### Development Setup
110 |
111 | 1. Clone the repository:
112 | ```bash
113 | git clone https://github.com/orladigital/database2prompt.git
114 | cd database2prompt
115 | ```
116 |
117 | 2. Create a virtual environment:
118 | ```bash
119 | python -m venv .venv
120 | source .venv/bin/activate # On Windows: .venv\Scripts\activate
121 | ```
122 |
123 | 3. Install development dependencies:
124 | ```bash
125 | pip install poetry
126 | poetry install
127 | ```
128 |
129 | 4. Start the development database (optional):
130 | ```bash
131 | docker compose up -d
132 | ```
133 |
134 | 5. Run the project:
135 | ```bash
136 | poetry run python database2prompt/main.py
137 | ```
138 |
139 | ### How to Contribute
140 |
141 | You can contribute to database2prompt in many different ways:
142 |
143 | * Suggest a feature
144 | * Code an approved feature idea (check our issues)
145 | * Report a bug
146 | * Fix something and open a pull request
147 | * Help with documentation
148 | * Spread the word!
149 |
150 | ## License
151 |
152 | Licensed under the MIT License, see [LICENSE](https://github.com/orladigital/database2prompt/blob/main/LICENSE) for more information.
153 |
154 |
--------------------------------------------------------------------------------
/database2prompt/database/processing/database_processor.py:
--------------------------------------------------------------------------------
1 | from sqlite3.dbapi2 import paramstyle
2 |
3 | from ..core.database_params import DatabaseParams
4 | from ..core.database_strategy import DatabaseStrategy
5 | from ...markdown.markdown_generator import MarkdownGenerator
6 | from ...json_generator.json_generator import JsonGenerator
7 |
8 | from typing import List, Dict, Literal
9 |
10 | from sqlalchemy import Table, Boolean
11 | from sqlalchemy.schema import FetchedValue, Computed, Identity, DefaultClause
12 | from sqlalchemy.sql.type_api import TypeEngine
13 | from sqlalchemy.sql.sqltypes import VARCHAR, INTEGER, BIGINT, NUMERIC, CHAR, DATE, TIMESTAMP, TEXT, DOUBLE_PRECISION
14 | from sqlalchemy.dialects.postgresql.types import TSVECTOR
15 | from sqlalchemy.dialects.postgresql.named_types import DOMAIN
16 |
17 | OutputFormat = Literal["json", "markdown"]
18 |
19 | class DatabaseProcessor():
20 |
21 | def __init__(self, database: DatabaseStrategy, params: DatabaseParams):
22 | self.database = database
23 | self.processed_info = {
24 | "tables": {},
25 | "views": {}
26 | }
27 | self.params = params
28 |
29 | def database_to_prompt(self, output_format: OutputFormat = "markdown") -> str:
30 | """Generate documentation from database in the specified format
31 |
32 | Args:
33 | output_format (str): The output format - either "json" or "markdown"
34 |
35 | Returns:
36 | str: The generated documentation in the specified format
37 | """
38 | # Process database information
39 | processed_info = self.process_data(verbose=False)
40 |
41 | # Generate output based on format
42 | if output_format == "markdown":
43 | generator = MarkdownGenerator(processed_info)
44 | return generator.generate()
45 | elif output_format == "json":
46 | generator = JsonGenerator(processed_info)
47 | return generator.generate()
48 | else:
49 | raise ValueError("Output format must be either 'json' or 'markdown'")
50 |
51 | def process_data(self, verbose: bool = False) -> dict:
52 | """Take the information of the database and process it for output generation
53 |
54 | Args:
55 | verbose (bool, optional): If True, prints discovery progress. Defaults to False.
56 |
57 | Returns:
58 | dict: Processed database information
59 | """
60 | # Reset processed info to ensure clean state
61 | self.processed_info = {
62 | "tables": {},
63 | "views": {}
64 | }
65 |
66 | schemas = list(self.database.list_schemas())
67 | if len(schemas) != 0:
68 | self.__iterate_tables(schemas, verbose)
69 | views = self.database.list_views()
70 | if len(views) != 0:
71 | self.__iterate_views(views, verbose)
72 | return self.processed_info
73 |
    def __iterate_tables(self, schemas: list[str], verbose: bool = False):
        """Collect per-table metadata for every schema into self.processed_info."""
        for schema_name in schemas:
            tables = self.database.list_tables(schema_name)
            # One bulk statistics query per schema instead of one per table.
            all_estimated_rows = self.database.estimated_rows(tables)

            for table_name in tables:
                fully_qualified_name = f"{schema_name}.{table_name}" if schema_name != None else table_name

                # Check whether the table should be skipped per user params.
                if not self.params.should_document_table(fully_qualified_name):
                    if verbose:
                        print(f"Skipping {fully_qualified_name} table (ignored)...")
                    continue

                if verbose:
                    print(f"Discovering {fully_qualified_name} table...")

                table = self.database.table_object(table_name, schema_name)
                fields = self.__get_processed_fields(table)

                # Sample rows are best-effort: permissions or exotic types may
                # make the SELECT fail; fall back to an empty sample.
                try:
                    sample_data = self.database.get_table_sample(table_name, schema_name)
                except Exception as e:
                    if verbose:
                        print(f"Could not get sample data for {fully_qualified_name}: {str(e)}")
                    sample_data = []

                self.processed_info["tables"][fully_qualified_name] = {
                    "name": table_name,
                    "schema": schema_name,
                    "estimated_rows": all_estimated_rows.get(table_name),
                    "fields": fields,
                    "sample_data": sample_data
                }
109 |
110 | def __get_processed_fields(self, table: Table):
111 | fields = {}
112 | for (name, column) in table.columns.items():
113 | fields[name] = {
114 | "type": self.__get_processed_type(column.type),
115 | "default": self.__get_processed_default_value(column.server_default),
116 | "nullable": self.__get_processed_nullable(column.nullable),
117 | }
118 | return fields
119 |
120 | def __get_processed_type(self, type: TypeEngine):
121 | if isinstance(type, VARCHAR):
122 | return f"varchar({type.length})"
123 | elif isinstance(type, CHAR):
124 | return "bpchar" if type.length == None else f"bpchar({type.length})"
125 | elif isinstance(type, INTEGER):
126 | return "int4"
127 | elif isinstance(type, BIGINT):
128 | return "int8"
129 | elif isinstance(type, NUMERIC):
130 | return f"numeric({type.precision},{type.scale})"
131 | elif isinstance(type, DATE):
132 | return "date"
133 | elif isinstance(type, TIMESTAMP):
134 | return "timestamp"
135 | elif isinstance(type, TSVECTOR):
136 | return "tsvector"
137 | elif isinstance(type, DOMAIN):
138 | return f"{type.schema}.{type.name}"
139 | elif isinstance(type, TEXT):
140 | return "text"
141 | elif isinstance(type, DOUBLE_PRECISION):
142 | return "double precision"
143 | else:
144 | return str(type)
145 |
146 | def __get_processed_default_value(self, default: FetchedValue):
147 | if default is None: return
148 |
149 | if isinstance(default, DefaultClause):
150 | return f"DEFAULT {default.arg}"
151 | elif isinstance(default, Computed):
152 | return f"GENERATED ALWAYS AS {default.sqltext}{" STORED" if default.persisted else ""}"
153 | elif isinstance(default, Identity):
154 | increment_by = f"INCREMENT BY {default.increment}"
155 | min_value = f"MINVALUE {default.minvalue}"
156 | max_value = f"MAXVALUE {default.maxvalue}"
157 | start = f"START {default.start}"
158 | cache = f"CACHE {default.cache}"
159 | cycle = "CYCLE" if default.cycle else "NO CYCLE"
160 |
161 | return f"GENERATED BY DEFAULT AS IDENTITY({increment_by} {min_value} {max_value} {start} {cache} {cycle})"
162 | else:
163 | raise ValueError(f"Type {default.__class__} not implemented yet")
164 |
165 | def __get_processed_nullable(self, nullable: bool):
166 | return "NOT NULL" if not nullable else "NULL"
167 |
168 | def __iterate_views(self, views: List[Dict[str, str]], verbose: bool = False):
169 | for view in views:
170 | fully_qualified_name = f"{view["schema"]}.{view["name"]}" if view["schema"] != None else view["name"]
171 | if verbose:
172 | print(f"Discovering {fully_qualified_name} view...")
173 | self.processed_info["views"][fully_qualified_name] = {
174 | "name": view["name"],
175 | "schema": view["schema"],
176 | "ddl": view["ddl"]
177 | }
178 |
--------------------------------------------------------------------------------
/tests/database/processing/test_database_processor.py:
--------------------------------------------------------------------------------
1 | from database2prompt.database.processing.database_processor import DatabaseProcessor
2 | from database2prompt.database.core.database_strategy import DatabaseStrategy
3 |
4 | from sqlalchemy import Table, Column
5 | from sqlalchemy.schema import DefaultClause, Computed, Identity
6 | from sqlalchemy.types import VARCHAR, CHAR, INTEGER, BIGINT, NUMERIC, DATE, TIMESTAMP
7 | from sqlalchemy.sql.base import ReadOnlyColumnCollection
8 | from sqlalchemy.dialects.postgresql.types import TSVECTOR
9 | from sqlalchemy.dialects.postgresql.named_types import DOMAIN
10 |
11 | from unittest.mock import Mock
12 |
def test_processed_info():
    """Every table of every schema appears in the result keyed as ``schema.table``."""
    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["op"]
    strategy.list_tables.return_value = ["stock", "employee"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = []

    result = DatabaseProcessor(strategy).process_data()
    tables = result["tables"]

    for schema, name in (("op", "stock"), ("op", "employee")):
        entry = tables[f"{schema}.{name}"]
        assert entry is not None
        assert entry["schema"] == schema
        assert entry["name"] == name
38 |
def test_processed_estimated_rows():
    """Estimated row counts are attached to the matching table entries."""
    expected_rows = {"user": 1, "stock": 203, "employee": 54}

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["stock", "employee", "user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = expected_rows
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = []

    result = DatabaseProcessor(strategy).process_data()
    tables = result["tables"]

    for name, count in expected_rows.items():
        assert tables[f"public.{name}"]["estimated_rows"] == count
60 |
61 |
def test_processed_fields_types():
    """Each SQLAlchemy column type is rendered as its PostgreSQL type string."""
    # (spec class, attributes to set on the type mock, expected rendering)
    cases = [
        (VARCHAR, {"length": 4}, "varchar(4)"),
        (CHAR, {"length": None}, "bpchar"),
        (CHAR, {"length": 128}, "bpchar(128)"),
        (INTEGER, {}, "int4"),
        (BIGINT, {}, "int8"),
        (NUMERIC, {"precision": 24, "scale": 5}, "numeric(24,5)"),
        (DATE, {}, "date"),
        (TIMESTAMP, {}, "timestamp"),
        (TSVECTOR, {}, "tsvector"),
        (DOMAIN, {"schema": "another_schema", "name": "my_domain"}, "another_schema.my_domain"),
    ]

    columns = []
    for index, (spec, attrs, _expected) in enumerate(cases):
        type_mock = Mock(spec)
        for attr, value in attrs.items():
            setattr(type_mock, attr, value)
        column_mock = Mock(Column)
        column_mock.type = type_mock
        column_mock.server_default = None
        column_mock.nullable = False
        columns.append((f"col{index}", column_mock))

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {"user": 1}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = columns

    result = DatabaseProcessor(strategy).process_data()
    fields = result["tables"]["public.user"]["fields"]

    for index, (_spec, _attrs, expected) in enumerate(cases):
        assert fields[f"col{index}"]["type"] == expected
171 |
def test_processed_fields_default_clause():
    """A plain server default is rendered as ``DEFAULT <value>``."""
    default_clause = Mock(DefaultClause)
    default_clause.arg = 123

    column = Mock(Column)
    column.type = Mock(INTEGER)
    column.server_default = default_clause
    column.nullable = True

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {"user": 1}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = [("col0", column)]

    result = DatabaseProcessor(strategy).process_data()
    fields = result["tables"]["public.user"]["fields"]

    assert fields["col0"]["default"] == "DEFAULT 123"
201 |
def test_processed_fields_computed_value():
    """Computed columns render GENERATED ALWAYS AS, with STORED when persisted."""
    # (sql text, persisted flag, expected rendering)
    cases = [
        ("({some sql not persisted})", False,
         "GENERATED ALWAYS AS ({some sql not persisted})"),
        ("({another sql but persisted})", True,
         "GENERATED ALWAYS AS ({another sql but persisted}) STORED"),
    ]

    columns = []
    for index, (sqltext, persisted, _expected) in enumerate(cases):
        computed = Mock(Computed)
        computed.sqltext = sqltext
        computed.persisted = persisted
        column = Mock(Column)
        column.type = Mock(INTEGER)
        column.server_default = computed
        column.nullable = True
        columns.append((f"col{index}", column))

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {"user": 1}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = columns

    result = DatabaseProcessor(strategy).process_data()
    fields = result["tables"]["public.user"]["fields"]

    for index, (_sqltext, _persisted, expected) in enumerate(cases):
        assert fields[f"col{index}"]["default"] == expected
244 |
def test_processed_fields_generated_identity():
    """Identity columns render the full GENERATED BY DEFAULT AS IDENTITY clause."""
    # (identity attributes, expected rendering)
    cases = [
        (
            {"increment": 1, "minvalue": 2, "maxvalue": 3, "start": 4, "cache": 5, "cycle": True},
            "GENERATED BY DEFAULT AS IDENTITY(INCREMENT BY 1 MINVALUE 2 MAXVALUE 3 START 4 CACHE 5 CYCLE)",
        ),
        (
            {"increment": 2, "minvalue": -5, "maxvalue": 10, "start": 1, "cache": 420, "cycle": False},
            "GENERATED BY DEFAULT AS IDENTITY(INCREMENT BY 2 MINVALUE -5 MAXVALUE 10 START 1 CACHE 420 NO CYCLE)",
        ),
    ]

    columns = []
    for index, (attrs, _expected) in enumerate(cases):
        identity = Mock(Identity)
        for attr, value in attrs.items():
            setattr(identity, attr, value)
        column = Mock(Column)
        column.type = Mock(INTEGER)
        column.server_default = identity
        column.nullable = True
        columns.append((f"col{index}", column))

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {"user": 1}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = columns

    result = DatabaseProcessor(strategy).process_data()
    fields = result["tables"]["public.user"]["fields"]

    for index, (_attrs, expected) in enumerate(cases):
        assert fields[f"col{index}"]["default"] == expected
295 |
def test_processed_fields_nullable():
    """The nullable flag is rendered as ``NULL`` / ``NOT NULL``."""
    columns = []
    for index, nullable in enumerate((True, False)):
        column = Mock(Column)
        column.type = Mock(INTEGER)
        column.server_default = None
        column.nullable = nullable
        columns.append((f"col{index}", column))

    strategy = Mock(DatabaseStrategy)
    table_mock = Mock(Table)
    columns_mock = Mock(ReadOnlyColumnCollection)

    strategy.list_schemas.return_value = ["public"]
    strategy.list_tables.return_value = ["user"]
    strategy.list_views.return_value = []
    strategy.estimated_rows.return_value = {"user": 1}
    strategy.table_object.return_value = table_mock

    table_mock.columns = columns_mock
    columns_mock.items.return_value = columns

    result = DatabaseProcessor(strategy).process_data()
    fields = result["tables"]["public.user"]["fields"]

    assert fields["col0"]["nullable"] == "NULL"
    assert fields["col1"]["nullable"] == "NOT NULL"
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
2 |
3 | [[package]]
4 | name = "colorama"
5 | version = "0.4.6"
6 | description = "Cross-platform colored terminal text."
7 | optional = false
8 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
9 | groups = ["main"]
10 | markers = "sys_platform == \"win32\""
11 | files = [
12 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
13 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
14 | ]
15 |
16 | [[package]]
17 | name = "greenlet"
18 | version = "3.1.1"
19 | description = "Lightweight in-process concurrent programming"
20 | optional = false
21 | python-versions = ">=3.7"
22 | groups = ["main"]
23 | markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
24 | files = [
25 | {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
26 | {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
27 | {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
28 | {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
29 | {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
30 | {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
31 | {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
32 | {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
33 | {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
34 | {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
35 | {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
36 | {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
37 | {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
38 | {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
39 | {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
40 | {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
41 | {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
42 | {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
43 | {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
44 | {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
45 | {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
46 | {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
47 | {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
48 | {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
49 | {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
50 | {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
51 | {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
52 | {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
53 | {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
54 | {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
55 | {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
56 | {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
57 | {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
58 | {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
59 | {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
60 | {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
61 | {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
62 | {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
63 | {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
64 | {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
65 | {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
66 | {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
67 | {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
68 | {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
69 | {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
70 | {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
71 | {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
72 | {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
73 | {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
74 | {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
75 | {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
76 | {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
77 | {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
78 | {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
79 | {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
80 | {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
81 | {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
82 | {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
83 | {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
84 | {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
85 | {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
86 | {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
87 | {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
88 | {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
89 | {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
90 | {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
91 | {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
92 | {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
93 | {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
94 | {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
95 | {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
96 | {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
97 | {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
98 | ]
99 |
100 | [package.extras]
101 | docs = ["Sphinx", "furo"]
102 | test = ["objgraph", "psutil"]
103 |
104 | [[package]]
105 | name = "iniconfig"
106 | version = "2.0.0"
107 | description = "brain-dead simple config-ini parsing"
108 | optional = false
109 | python-versions = ">=3.7"
110 | groups = ["main"]
111 | files = [
112 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
113 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
114 | ]
115 |
116 | [[package]]
117 | name = "packaging"
118 | version = "24.2"
119 | description = "Core utilities for Python packages"
120 | optional = false
121 | python-versions = ">=3.8"
122 | groups = ["main"]
123 | files = [
124 | {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
125 | {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
126 | ]
127 |
128 | [[package]]
129 | name = "pluggy"
130 | version = "1.5.0"
131 | description = "plugin and hook calling mechanisms for python"
132 | optional = false
133 | python-versions = ">=3.8"
134 | groups = ["main"]
135 | files = [
136 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
137 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
138 | ]
139 |
140 | [package.extras]
141 | dev = ["pre-commit", "tox"]
142 | testing = ["pytest", "pytest-benchmark"]
143 |
144 | [[package]]
145 | name = "psycopg2-binary"
146 | version = "2.9.10"
147 | description = "psycopg2 - Python-PostgreSQL Database Adapter"
148 | optional = false
149 | python-versions = ">=3.8"
150 | groups = ["main"]
151 | files = [
152 | {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
153 | {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
154 | {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
155 | {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
156 | {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
157 | {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
158 | {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
159 | {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
160 | {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
161 | {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
162 | {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
163 | {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
164 | {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
165 | {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
166 | {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
167 | {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
168 | {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
169 | {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
170 | {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
171 | {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
172 | {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
173 | {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
174 | {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
175 | {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
176 | {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
177 | {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
178 | {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
179 | {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
180 | {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
181 | {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
182 | {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
183 | {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
184 | {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
185 | {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
186 | {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
187 | {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
188 | {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
189 | {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
190 | {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
191 | {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
192 | {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
193 | {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
194 | {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
195 | {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
196 | {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
197 | {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
198 | {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
199 | {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
200 | {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
201 | {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
202 | {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
203 | {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
204 | {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
205 | {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
206 | {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
207 | {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
208 | {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
209 | {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
210 | {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
211 | {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
212 | {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
213 | {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
214 | {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
215 | {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
216 | {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
217 | {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
218 | {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
219 | {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
220 | ]
221 |
222 | [[package]]
223 | name = "pytest"
224 | version = "8.3.4"
225 | description = "pytest: simple powerful testing with Python"
226 | optional = false
227 | python-versions = ">=3.8"
228 | groups = ["main"]
229 | files = [
230 | {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
231 | {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
232 | ]
233 |
234 | [package.dependencies]
235 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
236 | iniconfig = "*"
237 | packaging = "*"
238 | pluggy = ">=1.5,<2"
239 |
240 | [package.extras]
241 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
242 |
243 | [[package]]
244 | name = "python-dotenv"
245 | version = "1.1.0"
246 | description = "Read key-value pairs from a .env file and set them as environment variables"
247 | optional = false
248 | python-versions = ">=3.9"
249 | groups = ["main"]
250 | files = [
251 | {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"},
252 | {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"},
253 | ]
254 |
255 | [package.extras]
256 | cli = ["click (>=5.0)"]
257 |
258 | [[package]]
259 | name = "sqlalchemy"
260 | version = "2.0.37"
261 | description = "Database Abstraction Library"
262 | optional = false
263 | python-versions = ">=3.7"
264 | groups = ["main"]
265 | files = [
266 | {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"},
267 | {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"},
268 | {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f5d254a22394847245f411a2956976401e84da4288aa70cbcd5190744062c1"},
269 | {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41296bbcaa55ef5fdd32389a35c710133b097f7b2609d8218c0eabded43a1d84"},
270 | {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bedee60385c1c0411378cbd4dc486362f5ee88deceea50002772912d798bb00f"},
271 | {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c67415258f9f3c69867ec02fea1bf6508153709ecbd731a982442a590f2b7e4"},
272 | {file = "SQLAlchemy-2.0.37-cp310-cp310-win32.whl", hash = "sha256:650dcb70739957a492ad8acff65d099a9586b9b8920e3507ca61ec3ce650bb72"},
273 | {file = "SQLAlchemy-2.0.37-cp310-cp310-win_amd64.whl", hash = "sha256:93d1543cd8359040c02b6614421c8e10cd7a788c40047dbc507ed46c29ae5636"},
274 | {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c"},
275 | {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5"},
276 | {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8"},
277 | {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b"},
278 | {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087"},
279 | {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9"},
280 | {file = "SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989"},
281 | {file = "SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = "sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba"},
282 | {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef"},
283 | {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4"},
284 | {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4"},
285 | {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd"},
286 | {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098"},
287 | {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb"},
288 | {file = "SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761"},
289 | {file = "SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = "sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff"},
290 | {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658"},
291 | {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb"},
292 | {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4"},
293 | {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94"},
294 | {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0"},
295 | {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6"},
296 | {file = "SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2"},
297 | {file = "SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2"},
298 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44f569d0b1eb82301b92b72085583277316e7367e038d97c3a1a899d9a05e342"},
299 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2eae3423e538c10d93ae3e87788c6a84658c3ed6db62e6a61bb9495b0ad16bb"},
300 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfff7be361048244c3aa0f60b5e63221c5e0f0e509f4e47b8910e22b57d10ae7"},
301 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:5bc3339db84c5fb9130ac0e2f20347ee77b5dd2596ba327ce0d399752f4fce39"},
302 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:84b9f23b0fa98a6a4b99d73989350a94e4a4ec476b9a7dfe9b79ba5939f5e80b"},
303 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-win32.whl", hash = "sha256:51bc9cfef83e0ac84f86bf2b10eaccb27c5a3e66a1212bef676f5bee6ef33ebb"},
304 | {file = "SQLAlchemy-2.0.37-cp37-cp37m-win_amd64.whl", hash = "sha256:8e47f1af09444f87c67b4f1bb6231e12ba6d4d9f03050d7fc88df6d075231a49"},
305 | {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6b788f14c5bb91db7f468dcf76f8b64423660a05e57fe277d3f4fad7b9dcb7ce"},
306 | {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521ef85c04c33009166777c77e76c8a676e2d8528dc83a57836b63ca9c69dcd1"},
307 | {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75311559f5c9881a9808eadbeb20ed8d8ba3f7225bef3afed2000c2a9f4d49b9"},
308 | {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cce918ada64c956b62ca2c2af59b125767097ec1dca89650a6221e887521bfd7"},
309 | {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9d087663b7e1feabea8c578d6887d59bb00388158e8bff3a76be11aa3f748ca2"},
310 | {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cf95a60b36997dad99692314c4713f141b61c5b0b4cc5c3426faad570b31ca01"},
311 | {file = "SQLAlchemy-2.0.37-cp38-cp38-win32.whl", hash = "sha256:d75ead7dd4d255068ea0f21492ee67937bd7c90964c8f3c2bea83c7b7f81b95f"},
312 | {file = "SQLAlchemy-2.0.37-cp38-cp38-win_amd64.whl", hash = "sha256:74bbd1d0a9bacf34266a7907d43260c8d65d31d691bb2356f41b17c2dca5b1d0"},
313 | {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:648ec5acf95ad59255452ef759054f2176849662af4521db6cb245263ae4aa33"},
314 | {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35bd2df269de082065d4b23ae08502a47255832cc3f17619a5cea92ce478b02b"},
315 | {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f581d365af9373a738c49e0c51e8b18e08d8a6b1b15cc556773bcd8a192fa8b"},
316 | {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82df02816c14f8dc9f4d74aea4cb84a92f4b0620235daa76dde002409a3fbb5a"},
317 | {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94b564e38b344d3e67d2e224f0aec6ba09a77e4582ced41e7bfd0f757d926ec9"},
318 | {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:955a2a765aa1bd81aafa69ffda179d4fe3e2a3ad462a736ae5b6f387f78bfeb8"},
319 | {file = "SQLAlchemy-2.0.37-cp39-cp39-win32.whl", hash = "sha256:03f0528c53ca0b67094c4764523c1451ea15959bbf0a8a8a3096900014db0278"},
320 | {file = "SQLAlchemy-2.0.37-cp39-cp39-win_amd64.whl", hash = "sha256:4b12885dc85a2ab2b7d00995bac6d967bffa8594123b02ed21e8eb2205a7584b"},
321 | {file = "SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1"},
322 | {file = "sqlalchemy-2.0.37.tar.gz", hash = "sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb"},
323 | ]
324 |
325 | [package.dependencies]
326 | greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
327 | typing-extensions = ">=4.6.0"
328 |
329 | [package.extras]
330 | aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
331 | aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
332 | aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
333 | asyncio = ["greenlet (!=0.4.17)"]
334 | asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
335 | mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
336 | mssql = ["pyodbc"]
337 | mssql-pymssql = ["pymssql"]
338 | mssql-pyodbc = ["pyodbc"]
339 | mypy = ["mypy (>=0.910)"]
340 | mysql = ["mysqlclient (>=1.4.0)"]
341 | mysql-connector = ["mysql-connector-python"]
342 | oracle = ["cx_oracle (>=8)"]
343 | oracle-oracledb = ["oracledb (>=1.0.1)"]
344 | postgresql = ["psycopg2 (>=2.7)"]
345 | postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
346 | postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
347 | postgresql-psycopg = ["psycopg (>=3.0.7)"]
348 | postgresql-psycopg2binary = ["psycopg2-binary"]
349 | postgresql-psycopg2cffi = ["psycopg2cffi"]
350 | postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
351 | pymysql = ["pymysql"]
352 | sqlcipher = ["sqlcipher3_binary"]
353 |
354 | [[package]]
355 | name = "typing-extensions"
356 | version = "4.12.2"
357 | description = "Backported and Experimental Type Hints for Python 3.8+"
358 | optional = false
359 | python-versions = ">=3.8"
360 | groups = ["main"]
361 | files = [
362 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
363 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
364 | ]
365 |
366 | [metadata]
367 | lock-version = "2.1"
368 | python-versions = ">=3.12"
369 | content-hash = "fee25df19c21da33c2bc3638fd615fc99290987f6a31d5280f3121eab18989d3"
370 |
--------------------------------------------------------------------------------