├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── BUG_REPORT.md │ └── FEATURE_REQUEST.md ├── dependabot.yml ├── pull_request_template.md ├── repo_meta.yaml └── workflows │ ├── build_test.yml │ ├── changelog.yml │ ├── cla_bot.yml │ ├── create_req_files.yml │ ├── jira_close.yml │ ├── jira_comment.yml │ ├── jira_issue.yml │ ├── parameters │ ├── parameters_aws.py.gpg │ ├── parameters_azure.py.gpg │ └── parameters_gcp.py.gpg │ ├── python-publish.yml │ ├── semgrep.yml │ ├── snyk-issue.yml │ ├── snyk-pr.yml │ └── stale_issue_bot.yml ├── .gitignore ├── .gitmodules ├── .pre-commit-config.yaml ├── DESCRIPTION.md ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── ci ├── build.sh ├── build_docker.sh ├── docker │ └── sqlalchemy_build │ │ ├── Dockerfile │ │ └── scripts │ │ └── entrypoint.sh ├── set_base_image.sh ├── test.sh ├── test_docker.sh └── test_linux.sh ├── license_header.txt ├── pyproject.toml ├── setup.cfg ├── snyk ├── requirements.txt ├── requiremtnts.txt └── update_requirements.py ├── src └── snowflake │ └── sqlalchemy │ ├── __init__.py │ ├── _constants.py │ ├── base.py │ ├── compat.py │ ├── custom_commands.py │ ├── custom_types.py │ ├── exc.py │ ├── functions.py │ ├── parser │ └── custom_type_parser.py │ ├── provision.py │ ├── requirements.py │ ├── snowdialect.py │ ├── sql │ ├── __init__.py │ └── custom_schema │ │ ├── __init__.py │ │ ├── clustered_table.py │ │ ├── custom_table_base.py │ │ ├── custom_table_prefix.py │ │ ├── dynamic_table.py │ │ ├── hybrid_table.py │ │ ├── iceberg_table.py │ │ ├── options │ │ ├── __init__.py │ │ ├── as_query_option.py │ │ ├── cluster_by_option.py │ │ ├── identifier_option.py │ │ ├── invalid_table_option.py │ │ ├── keyword_option.py │ │ ├── keywords.py │ │ ├── literal_option.py │ │ ├── table_option.py │ │ └── target_lag_option.py │ │ ├── snowflake_table.py │ │ └── table_from_query.py │ ├── util.py │ └── version.py ├── tested_requirements ├── requirements_310.reqs ├── requirements_37.reqs ├── requirements_38.reqs └── requirements_39.reqs ├── tests ├── README.rst ├── __init__.py ├── __snapshots__ │ ├── test_compile_dynamic_table.ambr │ ├── test_core.ambr │ ├── test_orm.ambr │ ├── test_reflect_dynamic_table.ambr │ ├── test_structured_datatypes.ambr │ └── test_unit_structured_types.ambr ├── conftest.py ├── custom_tables │ ├── __init__.py │ ├── __snapshots__ │ │ ├── test_compile_dynamic_table.ambr │ │ ├── test_compile_hybrid_table.ambr │ │ ├── test_compile_iceberg_table.ambr │ │ ├── test_compile_snowflake_table.ambr │ │ ├── test_create_dynamic_table.ambr │ │ ├── test_create_hybrid_table.ambr │ │ ├── test_create_iceberg_table.ambr │ │ ├── test_create_snowflake_table.ambr │ │ ├── test_generic_options.ambr │ │ ├── test_reflect_hybrid_table.ambr │ │ └── test_reflect_snowflake_table.ambr │ ├── test_compile_dynamic_table.py │ ├── test_compile_hybrid_table.py │ ├── test_compile_iceberg_table.py │ ├── test_compile_snowflake_table.py │ ├── test_create_dynamic_table.py │ ├── test_create_hybrid_table.py │ ├── test_create_iceberg_table.py │ ├── test_create_snowflake_table.py │ ├── test_generic_options.py │ ├── test_reflect_dynamic_table.py │ ├── test_reflect_hybrid_table.py │ └── test_reflect_snowflake_table.py ├── data │ └── users.txt ├── sqlalchemy_test_suite │ ├── README.md │ ├── __init__.py │ ├── conftest.py │ ├── test_suite.py │ └── test_suite_20.py ├── test_compiler.py ├── test_copy.py ├── test_core.py ├── test_create.py ├── test_custom_functions.py ├── test_custom_types.py ├── test_geography.py ├── test_geometry.py ├── test_imports.py ├── test_index_reflection.py ├── 
test_multivalues_insert.py ├── test_orm.py ├── test_pandas.py ├── test_qmark.py ├── test_quote.py ├── test_quote_identifiers.py ├── test_semi_structured_datatypes.py ├── test_sequence.py ├── test_structured_datatypes.py ├── test_timestamp.py ├── test_transactions.py ├── test_unit_core.py ├── test_unit_cte.py ├── test_unit_structured_types.py ├── test_unit_types.py ├── test_unit_url.py └── util.py └── tox.ini /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @snowflakedb/ORM 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/BUG_REPORT.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 🐞 3 | about: Something isn't working as expected? Here is the right place to report. 4 | labels: bug, needs triage 5 | --- 6 | 7 | Please answer these questions before submitting your issue. Thanks! 8 | 9 | 1. What version of Python are you using? 10 | 11 | Replace with the output of `python --version --version` 12 | 13 | 2. What operating system and processor architecture are you using? 14 | 15 | Replace with the output of `python -c 'import platform; print(platform.platform())'` 16 | 17 | 3. What are the component versions in the environment (`pip freeze`)? 18 | 19 | Replace with the output of `python -m pip freeze` 20 | 21 | 4. What did you do? 22 | 23 | If possible, provide a recipe for reproducing the error. 24 | A complete runnable program is good. 25 | 26 | 5. What did you expect to see? 27 | 28 | What should have happened and what happened instead? 29 | 30 | 6. Can you set logging to DEBUG and collect the logs? 31 | 32 | ``` 33 | import logging 34 | import os 35 | 36 | for logger_name in ['snowflake.sqlalchemy', 'snowflake.connector']: 37 | logger = logging.getLogger(logger_name) 38 | logger.setLevel(logging.DEBUG) 39 | ch = logging.StreamHandler() 40 | ch.setLevel(logging.DEBUG) 41 | ch.setFormatter(logging.Formatter('%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) 42 | logger.addHandler(ch) 43 | ``` 44 | 45 | 48 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 💡 3 | about: Suggest a new idea for the project. 4 | labels: feature 5 | --- 6 | 7 | ## What is the current behavior? 8 | 9 | ## What is the desired behavior? 10 | 11 | ## How would this improve `snowflake-sqlalchemy`? 12 | 13 | ## References, Other Background 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | assignees: 8 | - "sfc-gh-mkeller" 9 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Please answer these questions before submitting your pull requests. Thanks! 2 | 3 | 1. What GitHub issue is this PR addressing? Make sure that there is an accompanying issue to your PR. 4 | 5 | Fixes #NNNN 6 | 7 | 2. 
Fill out the following pre-review checklist: 8 | 9 | - [ ] I am adding a new automated test(s) to verify correctness of my new code 10 | - [ ] I am adding new logging messages 11 | - [ ] I am adding new credentials 12 | - [ ] I am adding a new dependency 13 | 14 | 3. Please describe how your code solves the related issue. 15 | 16 | Please write a short description of how your code change solves the related issue. 17 | -------------------------------------------------------------------------------- /.github/repo_meta.yaml: -------------------------------------------------------------------------------- 1 | point_of_contact: @snowflakedb/client 2 | production: true 3 | code_owners_file_present: true 4 | jira_area: Snowpark: Application Development Ecosystem 5 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog Check 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, labeled, unlabeled] 6 | branches: 7 | - main 8 | 9 | jobs: 10 | check_change_log: 11 | runs-on: ubuntu-latest 12 | if: ${{!contains(github.event.pull_request.labels.*.name, 'NO-CHANGELOG-UPDATES')}} 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | with: 17 | persist-credentials: false 18 | fetch-depth: 0 19 | 20 | - name: Ensure DESCRIPTION.md is updated 21 | run: git diff --name-only --diff-filter=ACMRT ${{ github.event.pull_request.base.sha }} ${{ github.sha }} | grep -wq "DESCRIPTION.md" 22 | -------------------------------------------------------------------------------- /.github/workflows/cla_bot.yml: -------------------------------------------------------------------------------- 1 | name: "CLA Assistant" 2 | on: 3 | issue_comment: 4 | types: [created] 5 | pull_request_target: 6 | types: [opened,closed,synchronize] 7 | 8 | jobs: 9 | CLAssistant: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | actions: write 13 | contents: write 14 | pull-requests: write 15 | statuses: write 16 | steps: 17 | - name: "CLA Assistant" 18 | if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' 19 | uses: cla-assistant/github-action@master 20 | env: 21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_BOT_TOKEN }} 23 | with: 24 | path-to-signatures: 'signatures/version1.json' 25 | path-to-document: 'https://github.com/snowflakedb/CLA/blob/main/README.md' 26 | branch: 'main' 27 | allowlist: 'dependabot[bot],github-actions, sfc-gh-snyk-sca-sa' 28 | remote-organization-name: 'snowflakedb' 29 | remote-repository-name: 'cla-db' 30 | -------------------------------------------------------------------------------- /.github/workflows/create_req_files.yml: -------------------------------------------------------------------------------- 1 | name: Create Requirements Files 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | create-req-files: 8 | name: Create requirements files 9 | runs-on: ubuntu-latest 10 | strategy: 11 | matrix: 12 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | persist-credentials: false 17 | - name: Set up Python 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Display Python version 22 | run: python -c "import sys; print(sys.version)" 23 | - name: 
Upgrade setuptools, pip and wheel 24 | run: python -m pip install -U setuptools pip wheel uv 25 | - name: Install Snowflake SQLAlchemy 26 | shell: bash 27 | run: python -m uv pip install . 28 | - name: Generate reqs file name 29 | shell: bash 30 | run: echo "requirements_file=temp_requirement/requirements_$(python -c 'from sys import version_info;print(str(version_info.major)+str(version_info.minor))').reqs" >> $GITHUB_ENV 31 | - name: Create reqs file 32 | shell: bash 33 | run: | 34 | mkdir temp_requirement 35 | echo "# Generated on: $(python --version)" >${{ env.requirements_file }} 36 | python -m pip freeze | grep -v snowflake-sqlalchemy 1>>${{ env.requirements_file }} 2>/dev/null 37 | echo "snowflake-sqlalchemy==$(python -m uv pip show snowflake-sqlalchemy | grep ^Version | cut -d' ' -f2-)" >>${{ env.requirements_file }} 38 | id: create-reqs-file 39 | - name: Show created req file 40 | shell: bash 41 | run: cat ${{ env.requirements_file }} 42 | - uses: actions/upload-artifact@v4 43 | with: 44 | path: temp_requirement 45 | 46 | push-files: 47 | needs: create-req-files 48 | name: Commit and push files 49 | runs-on: ubuntu-latest 50 | steps: 51 | - uses: actions/checkout@v4 52 | with: 53 | persist-credentials: false 54 | token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} # stored in GitHub secrets 55 | - name: Download requirement files 56 | uses: actions/download-artifact@v4 57 | with: 58 | name: artifact 59 | path: tested_requirements 60 | - name: Commit and push new requirements files 61 | run: | 62 | git config user.name github-actions 63 | git config user.email github-actions@github.com 64 | git add tested_requirements 65 | git commit -m "Update requirements files" -a 66 | git push 67 | -------------------------------------------------------------------------------- /.github/workflows/jira_close.yml: -------------------------------------------------------------------------------- 1 | name: Jira closure 2 | 3 | on: 4 | issues: 5 | types: [closed, deleted] 6 | 7 | jobs: 8 | close-issue: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v4 13 | with: 14 | persist-credentials: false 15 | repository: snowflakedb/gh-actions 16 | ref: jira_v1 17 | token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} # stored in GitHub secrets 18 | path: . 
19 | - name: Jira login 20 | uses: atlassian/gajira-login@v3 21 | env: 22 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 23 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 24 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 25 | - name: Extract issue from title 26 | id: extract 27 | env: 28 | TITLE: "${{ github.event.issue.title }}" 29 | run: | 30 | jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') 31 | echo ::set-output name=jira::$jira 32 | - name: Close issue 33 | uses: ./jira/gajira-close 34 | if: startsWith(steps.extract.outputs.jira, 'SNOW-') 35 | with: 36 | issue: "${{ steps.extract.outputs.jira }}" 37 | -------------------------------------------------------------------------------- /.github/workflows/jira_comment.yml: -------------------------------------------------------------------------------- 1 | name: Jira comment 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | jobs: 8 | comment-issue: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Jira login 12 | uses: atlassian/gajira-login@v3 13 | env: 14 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 15 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 16 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 17 | - name: Extract issue from title 18 | id: extract 19 | env: 20 | TITLE: "${{ github.event.issue.title }}" 21 | run: | 22 | jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') 23 | echo ::set-output name=jira::$jira 24 | - name: Comment on issue 25 | uses: atlassian/gajira-comment@v3 26 | if: startsWith(steps.extract.outputs.jira, 'SNOW-') 27 | with: 28 | issue: "${{ steps.extract.outputs.jira }}" 29 | comment: "${{ github.event.comment.user.login }} commented:\n\n${{ github.event.comment.body }}\n\n${{ github.event.comment.html_url }}" 30 | -------------------------------------------------------------------------------- /.github/workflows/jira_issue.yml: -------------------------------------------------------------------------------- 1 | name: Jira creation 2 | 3 | on: 4 | issues: 5 | types: [opened] 6 | issue_comment: 7 | types: [created] 8 | 9 | jobs: 10 | create-issue: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | issues: write 14 | if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]')) 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v4 18 | with: 19 | persist-credentials: false 20 | repository: snowflakedb/gh-actions 21 | ref: jira_v1 22 | token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} # stored in GitHub secrets 23 | path: . 
24 | 25 | - name: Login 26 | uses: atlassian/gajira-login@v3 27 | env: 28 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 29 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 30 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 31 | 32 | - name: Create JIRA Ticket 33 | id: create 34 | uses: atlassian/gajira-create@v3 35 | with: 36 | project: SNOW 37 | issuetype: Bug 38 | summary: '${{ github.event.issue.title }}' 39 | description: | 40 | ${{ github.event.issue.body }} \\ \\ _Created from GitHub Action_ for ${{ github.event.issue.html_url }} 41 | fields: '{"customfield_11401":{"id":"14723"},"assignee":{"id":"712020:e527ae71-55cc-4e02-9217-1ca4ca8028a2"},"components":[{"id":"16161"},{"id":"16403"}], "labels": ["oss"], "priority": {"id": "10001"} }' 42 | 43 | - name: Update GitHub Issue 44 | uses: ./jira/gajira-issue-update 45 | env: 46 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 47 | with: 48 | issue_number: "{{ event.issue.id }}" 49 | owner: "{{ event.repository.owner.login }}" 50 | name: "{{ event.repository.name }}" 51 | jira: "${{ steps.create.outputs.issue }}" 52 | -------------------------------------------------------------------------------- /.github/workflows/parameters/parameters_aws.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-sqlalchemy/426395ff65c4d5cfc09a6f82f34ccc07d848aa8e/.github/workflows/parameters/parameters_aws.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/parameters_azure.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-sqlalchemy/426395ff65c4d5cfc09a6f82f34ccc07d848aa8e/.github/workflows/parameters/parameters_azure.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/parameters_gcp.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-sqlalchemy/426395ff65c4d5cfc09a6f82f34ccc07d848aa8e/.github/workflows/parameters/parameters_gcp.py.gpg -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 
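# Editor's note (hedged illustration, not part of the original workflow): the
# job below builds with hatch, signs every artifact in dist/ with sigstore, and
# verifies each signature twice (via the .sig/.crt pair and via the .sigstore
# bundle) before publishing. A consumer can verify a downloaded artifact the
# same way the workflow itself does, e.g.:
#
#   python -m sigstore verify identity <artifact> \
#     --bundle <artifact>.sigstore \
#     --cert-oidc-issuer https://token.actions.githubusercontent.com \
#     --cert-identity <server>/<repo>/.github/workflows/python-publish.yml@<ref>
#
# where <artifact>, <server>, <repo> and <ref> are placeholders.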
8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: write 17 | id-token: write 18 | 19 | jobs: 20 | deploy: 21 | 22 | runs-on: ubuntu-latest 23 | 24 | steps: 25 | - uses: actions/checkout@v4 26 | with: 27 | persist-credentials: false 28 | - name: Set up Python 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: '3.x' 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install -U uv 35 | python -m uv pip install -U hatch 36 | - name: Build package 37 | run: python -m hatch build --clean 38 | - name: List artifacts 39 | run: ls ./dist 40 | - name: Install sigstore 41 | run: python -m pip install sigstore 42 | - name: Signing 43 | run: | 44 | for dist in dist/*; do 45 | dist_base="$(basename "${dist}")" 46 | echo "dist: ${dist}" 47 | echo "dist_base: ${dist_base}" 48 | python -m \ 49 | sigstore sign "${dist}" \ 50 | --output-signature "${dist_base}.sig" \ 51 | --output-certificate "${dist_base}.crt" \ 52 | --bundle "${dist_base}.sigstore" 53 | 54 | # Verify using `.sig` `.crt` pair; 55 | python -m \ 56 | sigstore verify identity "${dist}" \ 57 | --signature "${dist_base}.sig" \ 58 | --cert "${dist_base}.crt" \ 59 | --cert-oidc-issuer https://token.actions.githubusercontent.com \ 60 | --cert-identity ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/.github/workflows/python-publish.yml@${GITHUB_REF} 61 | 62 | # Verify using `.sigstore` bundle; 63 | python -m \ 64 | sigstore verify identity "${dist}" \ 65 | --bundle "${dist_base}.sigstore" \ 66 | --cert-oidc-issuer https://token.actions.githubusercontent.com \ 67 | --cert-identity ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/.github/workflows/python-publish.yml@${GITHUB_REF} 68 | done 69 | - name: List artifacts after sign 70 | run: ls ./dist 71 | - name: Copy files to release 72 | run: | 73 | gh release upload ${{ github.event.release.tag_name }} *.sigstore 74 | gh release upload ${{ github.event.release.tag_name }} *.sig 75 | gh release upload ${{ github.event.release.tag_name }} *.crt 76 | env: 77 | GITHUB_TOKEN: ${{ github.TOKEN }} 78 | - name: Publish package 79 | uses: pypa/gh-action-pypi-publish@release/v1 80 | with: 81 | user: __token__ 82 | password: ${{ secrets.PYPI_API_TOKEN }} 83 | -------------------------------------------------------------------------------- /.github/workflows/semgrep.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Run semgrep checks 3 | 4 | on: 5 | pull_request: 6 | branches: [main] 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | run-semgrep-reusable-workflow: 13 | uses: snowflakedb/reusable-workflows/.github/workflows/semgrep-v2.yml@main 14 | secrets: 15 | token: ${{ secrets.SEMGREP_APP_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/workflows/snyk-issue.yml: -------------------------------------------------------------------------------- 1 | name: Snyk Issue 2 | 3 | on: 4 | schedule: 5 | - cron: '* */12 * * *' 6 | 7 | permissions: 8 | contents: read 9 | issues: write 10 | pull-requests: write 11 | 12 | concurrency: snyk-issue 13 | 14 | jobs: 15 | snyk: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout Action 19 | uses: actions/checkout@v4 20 | with: 21 | persist-credentials: false 22 | repository: snowflakedb/whitesource-actions 23 | token: ${{ secrets.whitesource_action_token }} 24 | path: whitesource-actions 25 | - name: Set Env 26 | run: echo "repo=${{ github.event.repository.name 
}}" >> $GITHUB_ENV 27 | - name: Jira Creation 28 | uses: ./whitesource-actions/snyk-issue 29 | with: 30 | snyk_org: ${{ secrets.snyk_org_id_public_repo }} 31 | snyk_token: ${{ secrets.snyk_github_integration_token_public_repo }} 32 | jira_token: ${{ secrets.jira_token_public_repo }} 33 | env: 34 | gh_token: ${{ secrets.github_token }} 35 | -------------------------------------------------------------------------------- /.github/workflows/snyk-pr.yml: -------------------------------------------------------------------------------- 1 | name: Snyk PR 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | 7 | permissions: 8 | contents: read 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | snyk: 14 | runs-on: ubuntu-latest 15 | if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }} 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | persist-credentials: false 21 | ref: ${{ github.event.pull_request.head.ref }} 22 | fetch-depth: 0 23 | 24 | - name: Checkout Action 25 | uses: actions/checkout@v4 26 | with: 27 | persist-credentials: false 28 | repository: snowflakedb/whitesource-actions 29 | token: ${{ secrets.whitesource_action_token }} 30 | path: whitesource-actions 31 | 32 | - name: Snyk Pull Request Scan Check 33 | uses: ./whitesource-actions/snyk-pr 34 | env: 35 | pr_title: ${{ github.event.pull_request.title }} 36 | with: 37 | jira_token: ${{ secrets.jira_token_public_repo }} 38 | gh_token: ${{ secrets.github_token }} 39 | amend: false 40 | -------------------------------------------------------------------------------- /.github/workflows/stale_issue_bot.yml: -------------------------------------------------------------------------------- 1 | name: Close Stale Issues 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | staleDays: 6 | required: true 7 | 8 | 9 | jobs: 10 | stale: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/stale@v9 14 | with: 15 | close-issue-message: 'To clean up and re-prioritize bugs and feature requests we are closing all issues older than 6 months as of Apr 1, 2023. If there are any issues or feature requests that you would like us to address, please re-create them. For urgent issues, opening a support case with this link [Snowflake Community](https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge) is the fastest way to get a response' 16 | days-before-issue-stale: ${{ inputs.staleDays }} 17 | days-before-pr-stale: -1 18 | # Stale issues are closed immediately 19 | days-before-issue-close: 0 20 | days-before-pr-close: -1 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tests/parameters*.py 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | #*.spec # used to build snowsql 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv*/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | 93 | # vim 94 | *.swp 95 | 96 | # others 97 | .tox/ 98 | generated_version.py 99 | *coverage.xml 100 | .DS_Store 101 | 102 | # Editor specific 103 | .idea/ 104 | .vscode/ 105 | *.code-workspace 106 | 107 | # WhiteSource Scan 108 | wss-*agent.config 109 | wss-unified-agent.jar 110 | whitesource/ 111 | .idea 112 | Python 113 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-sqlalchemy/426395ff65c4d5cfc09a6f82f34ccc07d848aa8e/.gitmodules -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: '^(.*egg.info.*|.*/parameters.py).*$' 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.5.0 5 | hooks: 6 | - id: trailing-whitespace 7 | exclude: '\.ambr$' 8 | - id: end-of-file-fixer 9 | - id: check-yaml 10 | exclude: .github/repo_meta.yaml 11 | - id: debug-statements 12 | - repo: https://github.com/PyCQA/isort 13 | rev: 5.13.2 14 | hooks: 15 | - id: isort 16 | - repo: https://github.com/asottile/pyupgrade 17 | rev: v3.15.1 18 | hooks: 19 | - id: pyupgrade 20 | args: [--py37-plus] 21 | - repo: https://github.com/psf/black 22 | rev: 24.2.0 23 | hooks: 24 | - id: black 25 | args: 26 | - --safe 27 | language_version: python3 28 | - repo: https://github.com/Lucas-C/pre-commit-hooks.git 29 | rev: v1.5.5 30 | hooks: 31 | - id: insert-license 32 | name: insert-py-license 33 | files: > 34 | (?x)^( 35 | src/snowflake/sqlalchemy/.*\.py| 36 | tests/.*\.py| 37 | setup.py 38 | )$ 39 | args: 40 | - --license-filepath 41 | - license_header.txt 42 | - repo: https://github.com/pycqa/flake8 43 | rev: 7.0.0 44 | hooks: 45 | - id: flake8 46 | additional_dependencies: 47 | - flake8-bugbear 48 | - repo: local 49 | hooks: 50 | - id: requirements-update 51 | name: "Update dependencies from pyproject.toml to snyk/requirements.txt" 52 | language: system 53 | entry: python snyk/update_requirements.py 54 | files: ^pyproject.toml$ 55 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md *.py 2 | prune tests 3 | -------------------------------------------------------------------------------- /ci/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | 
# Build snowflake-sqlalchemy 4 | set -o pipefail 5 | 6 | PYTHON="python3.8" 7 | THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 8 | SQLALCHEMY_DIR="$(dirname "${THIS_DIR}")" 9 | DIST_DIR="${SQLALCHEMY_DIR}/dist" 10 | 11 | cd "$SQLALCHEMY_DIR" 12 | # Clean up previously built DIST_DIR 13 | if [ -d "${DIST_DIR}" ]; then 14 | echo "[WARN] ${DIST_DIR} already existing, deleting it..." 15 | rm -rf "${DIST_DIR}" 16 | fi 17 | 18 | # Constants and setup 19 | export PATH=$PATH:$HOME/.local/bin 20 | 21 | echo "[Info] Building snowflake-sqlalchemy with $PYTHON" 22 | # Clean up possible build artifacts 23 | rm -rf build generated_version.py 24 | export UV_NO_CACHE=true 25 | ${PYTHON} -m pip install uv hatch 26 | ${PYTHON} -m hatch build 27 | -------------------------------------------------------------------------------- /ci/build_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Build snowflake-sqlalchemy universal wheel in Docker 4 | set -o pipefail 5 | 6 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 7 | source $THIS_DIR/set_base_image.sh 8 | SQLALCHEMY_DIR="$( dirname "${THIS_DIR}")" 9 | 10 | mkdir -p $SQLALCHEMY_DIR/dist 11 | cd $THIS_DIR/docker/sqlalchemy_build 12 | 13 | arch=$(uname -p) 14 | 15 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014 16 | 17 | if [[ "$arch" == "aarch64" ]]; then 18 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-arm64 19 | else 20 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64 21 | fi 22 | 23 | echo "[Info] Building snowflake-sqlalchemy" 24 | docker run \ 25 | --rm \ 26 | -e TERM=vt102 \ 27 | -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ 28 | -e LOCAL_USER_ID=$(id -u ${USER}) \ 29 | --mount type=bind,source="${SQLALCHEMY_DIR}",target=/home/user/snowflake-sqlalchemy \ 30 | $(docker build --pull --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" -q .) 
\ 31 | /home/user/snowflake-sqlalchemy/ci/build.sh $1 32 | -------------------------------------------------------------------------------- /ci/docker/sqlalchemy_build/Dockerfile: -------------------------------------------------------------------------------- 1 | # We use manylinux1 base image because pyarrow_manylinux2010 has a bug and wheel failed to be audited 2 | ARG BASE_IMAGE=quay.io/pypa/manylinux2014_x86_64 3 | FROM $BASE_IMAGE 4 | 5 | # This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ 6 | ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 7 | ENV GOSU_PATH $GOSU_URL 8 | RUN curl -o /usr/local/bin/gosu -SL $GOSU_PATH \ 9 | && chmod +x /usr/local/bin/gosu 10 | 11 | COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh 12 | RUN chmod +x /usr/local/bin/entrypoint.sh 13 | 14 | WORKDIR /home/user 15 | RUN chmod 777 /home/user 16 | 17 | ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin:/opt/python/cp311-cp311/bin" 18 | 19 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 20 | -------------------------------------------------------------------------------- /ci/docker/sqlalchemy_build/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add local user 4 | # Either use the LOCAL_USER_ID if passed in at runtime or 5 | # fallback 6 | 7 | USER_ID=${LOCAL_USER_ID:-9001} 8 | 9 | echo "Starting with UID : $USER_ID" 10 | useradd --shell /bin/bash -u $USER_ID -o -c "" -m user 11 | export HOME=/home/user 12 | 13 | /usr/local/bin/gosu user "$@" 14 | -------------------------------------------------------------------------------- /ci/set_base_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Use the internal docker registry if running on Jenkins 4 | # 5 | set -o pipefail 6 | INTERNAL_REPO=nexus.int.snowflakecomputing.com:8086 7 | if [[ -n "$NEXUS_PASSWORD" ]]; then 8 | echo "[INFO] Pull docker images from $INTERNAL_REPO" 9 | NEXUS_USER=${USERNAME:-jenkins} 10 | docker login --username "$NEXUS_USER" --password "$NEXUS_PASSWORD" $INTERNAL_REPO 11 | export BASE_IMAGE_MANYLINUX2010=${INTERNAL_REPO}/docker/manylinux2010_x86_64 12 | export BASE_IMAGE_MANYLINUX2014=${INTERNAL_REPO}/docker/manylinux2014_x86_64 13 | else 14 | echo "[INFO] Pull docker images from public registry" 15 | export BASE_IMAGE_MANYLINUX2010=quay.io/pypa/manylinux2010_x86_64 16 | export BASE_IMAGE_MANYLINUX2014=quay.io/pypa/manylinux2014_x86_64 17 | fi 18 | -------------------------------------------------------------------------------- /ci/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Test Snowflake SQLAlchemy 4 | # 5 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 6 | SQLALCHEMY_DIR="$( dirname "${THIS_DIR}")" 7 | PARAMETERS_DIR="${SQLALCHEMY_DIR}/.github/workflows/parameters" 8 | 9 | cd "${SQLALCHEMY_DIR}" 10 | 11 | # Check Requirements 12 | if [ -z "${PARAMETERS_SECRET}" ]; then 13 | echo "Missing PARAMETERS_SECRET, failing..." 
14 | exit 1 15 | fi 16 | 17 | # Decrypt parameters file 18 | PARAMS_FILE="${PARAMETERS_DIR}/parameters_aws.py.gpg" 19 | [ ${cloud_provider} == azure ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_azure.py.gpg" 20 | [ ${cloud_provider} == gcp ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_gcp.py.gpg" 21 | gpg --quiet --batch --yes --decrypt --passphrase="${PARAMETERS_SECRET}" ${PARAMS_FILE} > tests/parameters.py 22 | 23 | # Download artifacts made by build 24 | aws s3 cp --recursive --only-show-errors s3://sfc-eng-jenkins/repository/sqlalchemy/linux/${client_git_branch}/${client_git_commit}/ dist 25 | 26 | echo "[Info] Going to run regular tests for Python ${python_env}" 27 | ${THIS_DIR}/test_docker.sh ${python_env} 28 | -------------------------------------------------------------------------------- /ci/test_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # Test Snowflake SQLAlchemy in Docker 3 | # NOTES: 4 | # - By default this script runs Python 3.7 tests, as these are installed in dev vms 5 | # - To test only specific version(s), pass them in like: `./test_docker.sh "3.7 3.8"` 6 | 7 | set -o pipefail 8 | 9 | # In case this is run from a dev VM 10 | PYTHON_ENV=${1:-3.7} 11 | 12 | # Set constants 13 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 14 | SQLALCHEMY_DIR="$( dirname "${THIS_DIR}")" 15 | WORKSPACE=${WORKSPACE:-$SQLALCHEMY_DIR} 16 | source $THIS_DIR/set_base_image.sh 17 | 18 | cd $THIS_DIR/docker/sqlalchemy_build 19 | 20 | CONTAINER_NAME=test_sqlalchemy 21 | 22 | echo "[Info] Building docker image" 23 | arch=$(uname -p) 24 | 25 | 26 | if [[ "$arch" == "aarch64" ]]; then 27 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014AARCH64 28 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-arm64 29 | else 30 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014 31 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 32 | fi 33 | 34 | echo "[Info] Start building docker image and testing" 35 | 36 | user_id=$(id -u ${USER}) 37 | docker run \ 38 | --rm \ 39 | --network=host \ 40 | -e TERM=vt102 \ 41 | -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ 42 | -e OPENSSL_FIPS=1 \ 43 | -e LOCAL_USER_ID=${user_id} \ 44 | -e AWS_ACCESS_KEY_ID \ 45 | -e AWS_SECRET_ACCESS_KEY \ 46 | -e SF_REGRESS_LOGS \ 47 | -e SF_PROJECT_ROOT \ 48 | -e cloud_provider \ 49 | -e JENKINS_HOME \ 50 | -e GITHUB_ACTIONS \ 51 | --mount type=bind,source="${SQLALCHEMY_DIR}",target=/home/user/snowflake-sqlalchemy \ 52 | $(docker build --pull --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" -q .) \ 53 | /home/user/snowflake-sqlalchemy/ci/test_linux.sh ${PYTHON_ENV} 54 | -------------------------------------------------------------------------------- /ci/test_linux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Test Snowflake SQLAlchemy in Linux 4 | # NOTES: 5 | # - Versions to be tested should be passed in as the first argument, e.g: "3.7 3.8". If omitted, 3.8-3.11 will be assumed. 
6 | # - This script assumes that dist/ has the wheel(s) built for all versions to be tested 7 | # - This is the script that test_docker.sh runs inside of the docker container 8 | 9 | PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11}" 10 | THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 11 | SQLALCHEMY_DIR="$(dirname "${THIS_DIR}")" 12 | 13 | # Install one copy of tox 14 | python3 -m pip install -U tox 15 | 16 | # Run tests 17 | cd $SQLALCHEMY_DIR 18 | for PYTHON_VERSION in ${PYTHON_VERSIONS}; do 19 | echo "[Info] Testing with ${PYTHON_VERSION}" 20 | SHORT_VERSION=$(python3 -c "print('${PYTHON_VERSION}'.replace('.', ''))") 21 | SQLALCHEMY_WHL=$(ls $SQLALCHEMY_DIR/dist/snowflake_sqlalchemy-*-py3-none-any.whl | sort -r | head -n 1) 22 | TEST_ENVLIST=fix_lint,py${SHORT_VERSION}-ci,py${SHORT_VERSION}-coverage 23 | echo "[Info] Running tox for ${TEST_ENVLIST}" 24 | python3 -m tox -e ${TEST_ENVLIST} --installpkg ${SQLALCHEMY_WHL} 25 | done 26 | -------------------------------------------------------------------------------- /license_header.txt: -------------------------------------------------------------------------------- 1 | 2 | Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "snowflake-sqlalchemy" 7 | dynamic = ["version"] 8 | description = "Snowflake SQLAlchemy Dialect" 9 | readme = "README.md" 10 | license = "Apache-2.0" 11 | requires-python = ">=3.8" 12 | authors = [ 13 | { name = "Snowflake Inc.", email = "triage-snowpark-python-api-dl@snowflake.com" }, 14 | ] 15 | keywords = ["Snowflake", "analytics", "cloud", "database", "db", "warehouse"] 16 | classifiers = [ 17 | "Development Status :: 5 - Production/Stable", 18 | "Environment :: Console", 19 | "Environment :: Other Environment", 20 | "Intended Audience :: Developers", 21 | "Intended Audience :: Education", 22 | "Intended Audience :: Information Technology", 23 | "Intended Audience :: System Administrators", 24 | "License :: OSI Approved :: Apache Software License", 25 | "Operating System :: OS Independent", 26 | "Programming Language :: Python :: 3", 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Programming Language :: Python :: 3.8", 29 | "Programming Language :: Python :: 3.9", 30 | "Programming Language :: Python :: 3.10", 31 | "Programming Language :: Python :: 3.11", 32 | "Programming Language :: Python :: 3.12", 33 | "Programming Language :: SQL", 34 | "Topic :: Database", 35 | "Topic :: Scientific/Engineering :: Information Analysis", 36 | "Topic :: Software Development", 37 | "Topic :: Software Development :: Libraries", 38 | "Topic :: Software Development :: Libraries :: Application Frameworks", 39 | "Topic :: Software Development :: Libraries :: Python Modules", 40 | ] 41 | dependencies = ["SQLAlchemy>=1.4.19", "snowflake-connector-python<4.0.0"] 42 | 43 | [tool.hatch.version] 44 | path = "src/snowflake/sqlalchemy/version.py" 45 | 46 | [project.optional-dependencies] 47 | development = [ 48 | "pre-commit", 49 | "pytest", 50 | "setuptools", 51 | "pytest-cov", 52 | "pytest-timeout", 53 | "pytest-rerunfailures", 54 | "pytz", 55 | "numpy", 56 | "mock", 57 | "syrupy==4.6.1", 58 | ] 59 | pandas = ["snowflake-connector-python[pandas]"] 60 | 
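# Editor's note (hedged): the optional-dependency groups above use standard pip
# extras syntax, so the pandas extra would be installed with, for example:
#
#   pip install "snowflake-sqlalchemy[pandas]"
#
# The hatch environments below pull in the same groups via
# features = ["development", "pandas"].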
[project.entry-points."sqlalchemy.dialects"] 62 | snowflake = "snowflake.sqlalchemy:dialect" 63 | 64 | [project.urls] 65 | Changelog = "https://github.com/snowflakedb/snowflake-sqlalchemy/blob/main/DESCRIPTION.md" 66 | Documentation = "https://docs.snowflake.com/en/user-guide/sqlalchemy.html" 67 | Homepage = "https://www.snowflake.com/" 68 | Issues = "https://github.com/snowflakedb/snowflake-sqlalchemy/issues" 69 | Source = "https://github.com/snowflakedb/snowflake-sqlalchemy" 70 | 71 | [tool.hatch.build.targets.sdist] 72 | exclude = ["/.github"] 73 | 74 | [tool.hatch.build.targets.wheel] 75 | packages = ["src/snowflake"] 76 | 77 | [tool.hatch.envs.default] 78 | path = ".venv" 79 | type = "virtual" 80 | extra-dependencies = ["SQLAlchemy>=1.4.19,<2.1.0"] 81 | features = ["development", "pandas"] 82 | python = "3.8" 83 | installer = "uv" 84 | 85 | [tool.hatch.envs.sa14] 86 | extra-dependencies = ["SQLAlchemy>=1.4.19,<2.0.0"] 87 | features = ["development", "pandas"] 88 | python = "3.8" 89 | 90 | [tool.hatch.envs.sa14.scripts] 91 | test-dialect = "pytest --ignore_v20_test -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml --ignore=tests/sqlalchemy_test_suite tests/" 92 | test-dialect-compatibility = "pytest --ignore_v20_test -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml tests/sqlalchemy_test_suite" 93 | test-dialect-aws = "pytest --ignore_v20_test -m \"aws\" -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml --ignore=tests/sqlalchemy_test_suite tests/" 94 | 95 | [tool.hatch.envs.default.env-vars] 96 | COVERAGE_FILE = "coverage.xml" 97 | SQLALCHEMY_WARN_20 = "1" 98 | 99 | [tool.hatch.envs.default.scripts] 100 | check = "pre-commit run --all-files" 101 | test-dialect = "pytest -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml --ignore=tests/sqlalchemy_test_suite tests/" 102 | test-dialect-compatibility = "pytest -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml tests/sqlalchemy_test_suite" 103 | test-dialect-aws = "pytest -m \"aws\" -ra -vvv --tb=short --cov snowflake.sqlalchemy --cov-append --junitxml ./junit.xml --ignore=tests/sqlalchemy_test_suite tests/" 104 | gh-cache-sum = "python -VV | sha256sum | cut -d' ' -f1" 105 | check-import = "python -c 'import snowflake.sqlalchemy; print(snowflake.sqlalchemy.__version__)'" 106 | 107 | [[tool.hatch.envs.release.matrix]] 108 | python = ["3.8", "3.9", "3.10", "3.11", "3.12"] 109 | features = ["development", "pandas"] 110 | 111 | [tool.hatch.envs.release.scripts] 112 | test-dialect = "pytest -ra -vvv --tb=short --ignore=tests/sqlalchemy_test_suite tests/" 113 | test-compatibility = "pytest -ra -vvv --tb=short tests/sqlalchemy_test_suite tests/" 114 | 115 | [tool.ruff] 116 | line-length = 88 117 | 118 | [tool.black] 119 | line-length = 88 120 | 121 | [tool.pytest.ini_options] 122 | addopts = "-m 'not feature_max_lob_size and not aws and not requires_external_volume'" 123 | markers = [ 124 | # Optional dependency groups markers 125 | "lambda: AWS lambda tests", 126 | "pandas: tests for pandas integration", 127 | "sso: tests for sso optional dependency integration", 128 | # Cloud provider markers 129 | "aws: tests for Amazon Cloud storage", 130 | "azure: tests for Azure Cloud storage", 131 | "gcp: tests for Google Cloud storage", 132 | # Test type markers 133 | "integ: integration tests", 134 | "unit: unit tests", 135 | "skipolddriver: skip for old driver tests", 136 | # Other markers 137 | 
"timeout: tests that need a timeout time", 138 | "internal: tests that could but should only run on our internal CI", 139 | "requires_external_volume: tests that needs a external volume to be executed", 140 | "external: tests that could but should only run on our external CI", 141 | "feature_max_lob_size: tests that could but should only run on our external CI", 142 | "feature_v20: tests that could but should only run on SqlAlchemy v20", 143 | ] 144 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [sqla_testing] 2 | requirement_cls=snowflake.sqlalchemy.requirements:Requirements 3 | profile_file=tests/profiles.txt 4 | -------------------------------------------------------------------------------- /snyk/requirements.txt: -------------------------------------------------------------------------------- 1 | SQLAlchemy>=1.4.19 2 | snowflake-connector-python<4.0.0 3 | -------------------------------------------------------------------------------- /snyk/requiremtnts.txt: -------------------------------------------------------------------------------- 1 | snowflake-connector-python<4.0.0 2 | SQLAlchemy>=1.4.19,<2.1.0 3 | -------------------------------------------------------------------------------- /snyk/update_requirements.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import tomlkit 4 | 5 | 6 | def sync(): 7 | pyproject = tomlkit.loads(Path("pyproject.toml").read_text()) 8 | snyk_reqiurements = Path("snyk/requirements.txt") 9 | dependencies = pyproject.get("project", {}).get("dependencies", []) 10 | 11 | with snyk_reqiurements.open("w") as fh: 12 | fh.write("\n".join(dependencies)) 13 | fh.write("\n") 14 | 15 | 16 | if __name__ == "__main__": 17 | sync() 18 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import sys 6 | 7 | if sys.version_info < (3, 8): 8 | import importlib_metadata 9 | else: 10 | import importlib.metadata as importlib_metadata 11 | 12 | from sqlalchemy.types import ( # noqa 13 | BIGINT, 14 | BINARY, 15 | BOOLEAN, 16 | CHAR, 17 | DATE, 18 | DATETIME, 19 | DECIMAL, 20 | FLOAT, 21 | INT, 22 | INTEGER, 23 | REAL, 24 | SMALLINT, 25 | TIME, 26 | TIMESTAMP, 27 | VARCHAR, 28 | ) 29 | 30 | from . 
import base, snowdialect # noqa 31 | from .custom_commands import ( # noqa 32 | AWSBucket, 33 | AzureContainer, 34 | CopyFormatter, 35 | CopyIntoStorage, 36 | CreateFileFormat, 37 | CreateStage, 38 | CSVFormatter, 39 | ExternalStage, 40 | JSONFormatter, 41 | MergeInto, 42 | PARQUETFormatter, 43 | ) 44 | from .custom_types import ( # noqa 45 | ARRAY, 46 | BYTEINT, 47 | CHARACTER, 48 | DEC, 49 | DOUBLE, 50 | FIXED, 51 | GEOGRAPHY, 52 | GEOMETRY, 53 | MAP, 54 | NUMBER, 55 | OBJECT, 56 | STRING, 57 | TEXT, 58 | TIMESTAMP_LTZ, 59 | TIMESTAMP_NTZ, 60 | TIMESTAMP_TZ, 61 | TINYINT, 62 | VARBINARY, 63 | VARIANT, 64 | ) 65 | from .sql.custom_schema import ( # noqa 66 | DynamicTable, 67 | HybridTable, 68 | IcebergTable, 69 | SnowflakeTable, 70 | ) 71 | from .sql.custom_schema.options import ( # noqa 72 | AsQueryOption, 73 | ClusterByOption, 74 | IdentifierOption, 75 | KeywordOption, 76 | LiteralOption, 77 | SnowflakeKeyword, 78 | TableOptionKey, 79 | TargetLagOption, 80 | TimeUnit, 81 | ) 82 | from .util import _url as URL # noqa 83 | 84 | base.dialect = dialect = snowdialect.dialect 85 | 86 | __version__ = importlib_metadata.version("snowflake-sqlalchemy") 87 | 88 | _custom_types = ( 89 | "BIGINT", 90 | "BINARY", 91 | "BOOLEAN", 92 | "CHAR", 93 | "DATE", 94 | "DATETIME", 95 | "DECIMAL", 96 | "FLOAT", 97 | "INT", 98 | "INTEGER", 99 | "REAL", 100 | "SMALLINT", 101 | "TIME", 102 | "TIMESTAMP", 103 | "URL", 104 | "VARCHAR", 105 | "ARRAY", 106 | "BYTEINT", 107 | "CHARACTER", 108 | "DEC", 109 | "DOUBLE", 110 | "FIXED", 111 | "GEOGRAPHY", 112 | "GEOMETRY", 113 | "OBJECT", 114 | "NUMBER", 115 | "STRING", 116 | "TEXT", 117 | "TIMESTAMP_LTZ", 118 | "TIMESTAMP_TZ", 119 | "TIMESTAMP_NTZ", 120 | "TINYINT", 121 | "VARBINARY", 122 | "VARIANT", 123 | "MAP", 124 | ) 125 | 126 | _custom_commands = ( 127 | "MergeInto", 128 | "CSVFormatter", 129 | "JSONFormatter", 130 | "PARQUETFormatter", 131 | "CopyFormatter", 132 | "CopyIntoStorage", 133 | "AWSBucket", 134 | "AzureContainer", 135 | "ExternalStage", 136 | "CreateStage", 137 | "CreateFileFormat", 138 | ) 139 | 140 | _custom_tables = ("HybridTable", "DynamicTable", "IcebergTable", "SnowflakeTable") 141 | 142 | _custom_table_options = ( 143 | "AsQueryOption", 144 | "TargetLagOption", 145 | "LiteralOption", 146 | "IdentifierOption", 147 | "KeywordOption", 148 | "ClusterByOption", 149 | ) 150 | 151 | _enums = ( 152 | "TimeUnit", 153 | "TableOptionKey", 154 | "SnowflakeKeyword", 155 | ) 156 | __all__ = ( 157 | *_custom_types, 158 | *_custom_commands, 159 | *_custom_tables, 160 | *_custom_table_options, 161 | *_enums, 162 | ) 163 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/_constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
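# Editor's note (hedged sketch, not repo code): the package __init__ above
# re-exports the URL helper (util._url) and registers the dialect. A typical
# engine setup looks like the following; every value is a placeholder, not a
# real account or credential:
#
#     from sqlalchemy import create_engine
#     from snowflake.sqlalchemy import URL
#
#     engine = create_engine(
#         URL(
#             account="myorganization-myaccount",
#             user="testuser",
#             password="***",
#             database="testdb",
#             schema="public",
#             warehouse="testwh",
#         )
#     )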
3 | # 4 | from .version import VERSION 5 | 6 | # parameters needed for usage tracking 7 | PARAM_APPLICATION = "application" 8 | PARAM_INTERNAL_APPLICATION_NAME = "internal_application_name" 9 | PARAM_INTERNAL_APPLICATION_VERSION = "internal_application_version" 10 | 11 | APPLICATION_NAME = "SnowflakeSQLAlchemy" 12 | SNOWFLAKE_SQLALCHEMY_VERSION = VERSION 13 | DIALECT_NAME = "snowflake" 14 | NOT_NULL = "NOT NULL" 15 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/compat.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | from __future__ import annotations 4 | 5 | import functools 6 | from typing import Callable 7 | 8 | from sqlalchemy import __version__ as SA_VERSION 9 | from sqlalchemy import util 10 | 11 | string_types = (str,) 12 | returns_unicode = util.symbol("RETURNS_UNICODE") 13 | 14 | IS_VERSION_20 = tuple(int(v) for v in SA_VERSION.split(".")) >= (2, 0, 0) 15 | 16 | 17 | def args_reducer(positions_to_drop: tuple): 18 | """Removes args at the positions provided in tuple positions_to_drop. 19 | 20 | For example, the tuple (3, 5) will remove the items at indices 3 and 5 (0-based). 21 | Keep in mind that on class methods the first position is cls or self. 22 | """ 23 | 24 | def fn_wrapper(fn: Callable): 25 | @functools.wraps(fn) 26 | def wrapper(*args): 27 | reduced_args = args 28 | if not IS_VERSION_20: 29 | reduced_args = tuple( 30 | arg for idx, arg in enumerate(args) if idx not in positions_to_drop 31 | ) 32 | fn(*reduced_args) 33 | 34 | return wrapper 35 | 36 | return fn_wrapper 37 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/custom_types.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
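# Editor's note (hedged illustration of compat.args_reducer above, using a
# throwaway function that is not part of this repo):
#
#     @args_reducer(positions_to_drop=(3, 5))
#     def fn(*args):
#         print(args)
#
#     fn("a", "b", "c", "d", "e", "f")
#     # SQLAlchemy 1.4.x prints ('a', 'b', 'c', 'e'): indices 3 and 5 dropped.
#     # SQLAlchemy 2.0+ prints ('a', 'b', 'c', 'd', 'e', 'f'): passed through.
#
# The wrapper also discards fn's return value, so it only suits callables
# invoked for their side effects.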
3 | # 4 | from typing import Optional, Tuple, Union 5 | 6 | import sqlalchemy.types as sqltypes 7 | import sqlalchemy.util as util 8 | from sqlalchemy.types import TypeEngine 9 | 10 | TEXT = sqltypes.VARCHAR 11 | CHARACTER = sqltypes.CHAR 12 | DEC = sqltypes.DECIMAL 13 | DOUBLE = sqltypes.FLOAT 14 | FIXED = sqltypes.DECIMAL 15 | NUMBER = sqltypes.DECIMAL 16 | BYTEINT = sqltypes.SMALLINT 17 | STRING = sqltypes.VARCHAR 18 | TINYINT = sqltypes.SMALLINT 19 | VARBINARY = sqltypes.BINARY 20 | 21 | 22 | def _process_float(value): 23 | if value == float("inf"): 24 | return "inf" 25 | elif value == float("-inf"): 26 | return "-inf" 27 | elif value is not None: 28 | return float(value) 29 | return value 30 | 31 | 32 | class SnowflakeType(sqltypes.TypeEngine): 33 | def _default_dialect(self): 34 | # Get around circular import 35 | return __import__("snowflake.sqlalchemy").sqlalchemy.dialect() 36 | 37 | 38 | class VARIANT(SnowflakeType): 39 | __visit_name__ = "VARIANT" 40 | 41 | 42 | class StructuredType(SnowflakeType): 43 | def __init__(self, is_semi_structured: bool = False): 44 | self.is_semi_structured = is_semi_structured 45 | super().__init__() 46 | 47 | 48 | class MAP(StructuredType): 49 | __visit_name__ = "MAP" 50 | 51 | def __init__( 52 | self, 53 | key_type: sqltypes.TypeEngine, 54 | value_type: sqltypes.TypeEngine, 55 | not_null: bool = False, 56 | ): 57 | self.key_type = key_type 58 | self.value_type = value_type 59 | self.not_null = not_null 60 | super().__init__() 61 | 62 | 63 | class OBJECT(StructuredType): 64 | __visit_name__ = "OBJECT" 65 | 66 | def __init__(self, **items_types: Union[TypeEngine, Tuple[TypeEngine, bool]]): 67 | for key, value in items_types.items(): 68 | if not isinstance(value, tuple): 69 | items_types[key] = (value, False) 70 | 71 | self.items_types = items_types 72 | self.is_semi_structured = len(items_types) == 0 73 | super().__init__() 74 | 75 | def __repr__(self): 76 | quote_char = "'" 77 | return "OBJECT(%s)" % ", ".join( 78 | [ 79 | f"{repr(key).strip(quote_char)}={repr(value)}" 80 | for key, value in self.items_types.items() 81 | ] 82 | ) 83 | 84 | 85 | class ARRAY(StructuredType): 86 | __visit_name__ = "SNOWFLAKE_ARRAY" 87 | 88 | def __init__( 89 | self, 90 | value_type: Optional[sqltypes.TypeEngine] = None, 91 | not_null: bool = False, 92 | ): 93 | self.value_type = value_type 94 | self.not_null = not_null 95 | super().__init__(is_semi_structured=value_type is None) 96 | 97 | 98 | class TIMESTAMP_TZ(SnowflakeType): 99 | __visit_name__ = "TIMESTAMP_TZ" 100 | 101 | 102 | class TIMESTAMP_LTZ(SnowflakeType): 103 | __visit_name__ = "TIMESTAMP_LTZ" 104 | 105 | 106 | class TIMESTAMP_NTZ(SnowflakeType): 107 | __visit_name__ = "TIMESTAMP_NTZ" 108 | 109 | 110 | class GEOGRAPHY(SnowflakeType): 111 | __visit_name__ = "GEOGRAPHY" 112 | 113 | 114 | class GEOMETRY(SnowflakeType): 115 | __visit_name__ = "GEOMETRY" 116 | 117 | 118 | class _CUSTOM_Date(SnowflakeType, sqltypes.Date): 119 | def literal_processor(self, dialect): 120 | def process(value): 121 | if value is not None: 122 | return f"'{value.isoformat()}'" 123 | 124 | return process 125 | 126 | 127 | class _CUSTOM_DateTime(SnowflakeType, sqltypes.DateTime): 128 | def literal_processor(self, dialect): 129 | def process(value): 130 | if value is not None: 131 | datetime_str = value.isoformat(" ", timespec="microseconds") 132 | return f"'{datetime_str}'" 133 | 134 | return process 135 | 136 | 137 | class _CUSTOM_Time(SnowflakeType, sqltypes.Time): 138 | def literal_processor(self, dialect): 139 | def process(value): 
140 | if value is not None: 141 | time_str = value.isoformat(timespec="microseconds") 142 | return f"'{time_str}'" 143 | 144 | return process 145 | 146 | 147 | class _CUSTOM_Float(SnowflakeType, sqltypes.Float): 148 | def bind_processor(self, dialect): 149 | return _process_float 150 | 151 | 152 | class _CUSTOM_DECIMAL(SnowflakeType, sqltypes.DECIMAL): 153 | @util.memoized_property 154 | def _type_affinity(self): 155 | return sqltypes.INTEGER if self.scale == 0 else sqltypes.DECIMAL 156 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/exc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | from typing import List 5 | 6 | from sqlalchemy.exc import ArgumentError 7 | 8 | 9 | class NoPrimaryKeyError(ArgumentError): 10 | def __init__(self, target: str): 11 | super().__init__(f"Table {target} requires a primary key.") 12 | 13 | 14 | class UnsupportedPrimaryKeysAndForeignKeysError(ArgumentError): 15 | def __init__(self, target: str): 16 | super().__init__(f"Primary key and foreign keys are not supported in {target}.") 17 | 18 | 19 | class RequiredParametersNotProvidedError(ArgumentError): 20 | def __init__(self, target: str, parameters: List[str]): 21 | super().__init__( 22 | f"{target} requires the following parameters: {', '.join(parameters)}." 23 | ) 24 | 25 | 26 | class UnexpectedTableOptionKeyError(ArgumentError): 27 | def __init__(self, expected: str, actual: str): 28 | super().__init__(f"Expected table option {expected} but got {actual}.") 29 | 30 | 31 | class OptionKeyNotProvidedError(ArgumentError): 32 | def __init__(self, target: str): 33 | super().__init__( 34 | f"Expected option key in {target} option but got NoneType instead." 35 | ) 36 | 37 | 38 | class UnexpectedOptionParameterTypeError(ArgumentError): 39 | def __init__(self, parameter_name: str, target: str, types: List[str]): 40 | super().__init__( 41 | f"Parameter {parameter_name} of {target} must be one" 42 | f" of the following types: {', '.join(types)}." 43 | ) 44 | 45 | 46 | class CustomOptionsAreOnlySupportedOnSnowflakeTables(ArgumentError): 47 | def __init__(self): 48 | super().__init__( 49 | "Identifier, Literal, TargetLag and other custom options are only supported on Snowflake tables." 50 | ) 51 | 52 | 53 | class UnexpectedOptionTypeError(ArgumentError): 54 | def __init__(self, options: List[str]): 55 | super().__init__( 56 | f"The following options are either unsupported or should be defined using a Snowflake table: {', '.join(options)}." 57 | ) 58 | 59 | 60 | class InvalidTableParameterTypeError(ArgumentError): 61 | def __init__(self, name: str, input_type: str, expected_types: List[str]): 62 | expected_types_str = "', '".join(expected_types) 63 | super().__init__( 64 | f"Invalid parameter type '{input_type}' provided for '{name}'. " 65 | f"Expected one of the following types: '{expected_types_str}'.\n" 66 | ) 67 | 68 | 69 | class MultipleErrors(ArgumentError): 70 | def __init__(self, errors): 71 | self.errors = errors 72 | 73 | def __str__(self): 74 | return "".join(str(e) for e in self.errors) 75 | 76 | 77 | class StructuredTypeNotSupportedInTableColumnsError(ArgumentError): 78 | def __init__(self, table_type: str, table_name: str, column_name: str): 79 | super().__init__( 80 | f"Column '{column_name}' is of a structured type, which is only supported on Iceberg tables. 
" 81 | f"The table '{table_name}' is of type '{table_type}', not Iceberg." 82 | ) 83 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | import warnings 5 | 6 | from sqlalchemy.sql import functions as sqlfunc 7 | 8 | FLATTEN_WARNING = "For backward compatibility params are not rendered." 9 | 10 | 11 | class flatten(sqlfunc.GenericFunction): 12 | name = "flatten" 13 | 14 | def __init__(self, *args, **kwargs): 15 | warnings.warn(FLATTEN_WARNING, DeprecationWarning, stacklevel=2) 16 | super().__init__(*args, **kwargs) 17 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/provision.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from sqlalchemy.testing.provision import set_default_schema_on_connection 5 | 6 | 7 | # This is only for test purpose required by Requirement "default_schema_name_switch" 8 | @set_default_schema_on_connection.for_db("snowflake") 9 | def _snowflake_set_default_schema_on_connection(cfg, dbapi_connection, schema_name): 10 | cursor = dbapi_connection.cursor() 11 | cursor.execute(f"USE SCHEMA {dbapi_connection.database}.{schema_name};") 12 | cursor.close() 13 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from .dynamic_table import DynamicTable 5 | from .hybrid_table import HybridTable 6 | from .iceberg_table import IcebergTable 7 | from .snowflake_table import SnowflakeTable 8 | 9 | __all__ = ["DynamicTable", "HybridTable", "IcebergTable", "SnowflakeTable"] 10 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/clustered_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
/src/snowflake/sqlalchemy/sql/custom_schema/clustered_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from typing import Any, Optional 6 | 7 | from sqlalchemy.sql.schema import MetaData, SchemaItem 8 | 9 | from .custom_table_base import CustomTableBase 10 | from .options.cluster_by_option import ClusterByOption, ClusterByOptionType 11 | from .options.table_option import TableOptionKey 12 | 13 | 14 | class ClusteredTableBase(CustomTableBase): 15 | 16 | @property 17 | def cluster_by(self) -> Optional[ClusterByOption]: 18 | return self._get_dialect_option(TableOptionKey.CLUSTER_BY) 19 | 20 | def __init__( 21 | self, 22 | name: str, 23 | metadata: MetaData, 24 | *args: SchemaItem, 25 | cluster_by: ClusterByOptionType = None, 26 | **kw: Any, 27 | ) -> None: 28 | if kw.get("_no_init", True): 29 | return 30 | 31 | options = [ 32 | ClusterByOption.create(cluster_by), 33 | ] 34 | 35 | kw.update(self._as_dialect_options(options)) 36 | super().__init__(name, metadata, *args, **kw) 37 | --------------------------------------------------------------------------------
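The `cluster_by` keyword accepted by `ClusteredTableBase` takes either a plain list or a prebuilt `ClusterByOption`; both normalize to the same dialect option. A brief sketch with invented names:

```
from sqlalchemy import Column, Integer, MetaData, text

from snowflake.sqlalchemy.sql.custom_schema import SnowflakeTable

metadata = MetaData()
SnowflakeTable(
    "clustered_example",
    metadata,
    Column("id", Integer, primary_key=True),
    # Renders as: CLUSTER BY (id, id > 5)
    cluster_by=["id", text("id > 5")],
)
```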
/src/snowflake/sqlalchemy/sql/custom_schema/custom_table_base.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | import typing 5 | from typing import Any, List 6 | 7 | from sqlalchemy.sql.schema import MetaData, SchemaItem, Table 8 | 9 | from ..._constants import DIALECT_NAME 10 | from ...compat import IS_VERSION_20 11 | from ...custom_commands import NoneType 12 | from ...custom_types import StructuredType 13 | from ...exc import ( 14 | MultipleErrors, 15 | NoPrimaryKeyError, 16 | RequiredParametersNotProvidedError, 17 | StructuredTypeNotSupportedInTableColumnsError, 18 | UnsupportedPrimaryKeysAndForeignKeysError, 19 | ) 20 | from .custom_table_prefix import CustomTablePrefix 21 | from .options.invalid_table_option import InvalidTableOption 22 | from .options.table_option import TableOption, TableOptionKey 23 | 24 | 25 | class CustomTableBase(Table): 26 | __table_prefixes__: typing.List[CustomTablePrefix] = [] 27 | _support_primary_and_foreign_keys: bool = True 28 | _enforce_primary_keys: bool = False 29 | _required_parameters: List[TableOptionKey] = [] 30 | _support_structured_types: bool = False 31 | 32 | @property 33 | def table_prefixes(self) -> typing.List[str]: 34 | return [prefix.name for prefix in self.__table_prefixes__] 35 | 36 | def __init__( 37 | self, 38 | name: str, 39 | metadata: MetaData, 40 | *args: SchemaItem, 41 | **kw: Any, 42 | ) -> None: 43 | if len(self.__table_prefixes__) > 0: 44 | prefixes = kw.get("prefixes", []) + self.table_prefixes 45 | kw.update(prefixes=prefixes) 46 | 47 | if not IS_VERSION_20 and hasattr(super(), "_init"): 48 | kw.pop("_no_init", True) 49 | super()._init(name, metadata, *args, **kw) 50 | else: 51 | super().__init__(name, metadata, *args, **kw) 52 | 53 | if not kw.get("autoload_with", False): 54 | self._validate_table() 55 | 56 | def _validate_table(self): 57 | exceptions: List[Exception] = [] 58 | 59 | columns_validation = self.__validate_columns() 60 | if columns_validation is not None: 61 | exceptions.append(columns_validation) 62 | 63 | for _, option in self.dialect_options[DIALECT_NAME].items(): 64 | if isinstance(option, InvalidTableOption): 65 | exceptions.append(option.exception) 66 | 67 | if self._enforce_primary_keys and len(self.primary_key) == 0: 68 | exceptions.append(NoPrimaryKeyError(self.__class__.__name__)) 69 | missing_parameters: List[str] = [] 70 | 71 | for required_parameter in self._required_parameters: 72 | if isinstance(self._get_dialect_option(required_parameter), NoneType): 73 | missing_parameters.append(required_parameter.name.lower()) 74 | if missing_parameters: 75 | exceptions.append( 76 | RequiredParametersNotProvidedError( 77 | self.__class__.__name__, missing_parameters 78 | ) 79 | ) 80 | 81 | if not self._support_primary_and_foreign_keys and ( 82 | self.primary_key or self.foreign_keys 83 | ): 84 | exceptions.append( 85 | UnsupportedPrimaryKeysAndForeignKeysError(self.__class__.__name__) 86 | ) 87 | 88 | if len(exceptions) > 1: 89 | exceptions.sort(key=lambda e: str(e)) 90 | raise MultipleErrors(exceptions) 91 | elif len(exceptions) == 1: 92 | raise exceptions[0] 93 | 94 | def __validate_columns(self): 95 | for column in self.columns: 96 | if not self._support_structured_types and isinstance( 97 | column.type, StructuredType 98 | ): 99 | return StructuredTypeNotSupportedInTableColumnsError( 100 | self.__class__.__name__, self.name, column.name 101 | ) 102 | 103 | def _get_dialect_option( 104 | self, option_name: TableOptionKey 105 | ) -> typing.Optional[TableOption]: 106 | if option_name.value in self.dialect_options[DIALECT_NAME]: 107 | return self.dialect_options[DIALECT_NAME][option_name.value] 108 | return None 109 | 110 | def _as_dialect_options( 111 | self, table_options: List[TableOption] 112 | ) -> typing.Dict[str, TableOption]: 113 | result = {} 114 | for table_option in table_options: 115 | if isinstance(table_option, TableOption) and isinstance( 116 | table_option.option_name, str 117 | ): 118 | result[DIALECT_NAME + "_" + table_option.option_name] = table_option 119 | return result 120 | 121 | @classmethod 122 | def is_equal_type(cls, table: Table) -> bool: 123 | for prefix in cls.__table_prefixes__: 124 | if prefix.name not in table._prefixes: 125 | return False 126 | 127 | return True 128 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/custom_table_prefix.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | from enum import Enum 5 | 6 | 7 | class CustomTablePrefix(Enum): 8 | DEFAULT = 0 9 | EXTERNAL = 1 10 | EVENT = 2 11 | HYBRID = 3 12 | ICEBERG = 4 13 | DYNAMIC = 5 14 | --------------------------------------------------------------------------------
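The validation flow in `custom_table_base.py` above collects every problem it finds and raises them together as `MultipleErrors`, or raises the single error directly when there is only one. A sketch of the single-error path, using the primary-key enforcement of `HybridTable` (defined further below):

```
from sqlalchemy import Column, Integer, MetaData

from snowflake.sqlalchemy.exc import NoPrimaryKeyError
from snowflake.sqlalchemy.sql.custom_schema import HybridTable

metadata = MetaData()
try:
    # HybridTable sets _enforce_primary_keys, so omitting a primary key fails.
    HybridTable("no_pk_table", metadata, Column("id", Integer))
except NoPrimaryKeyError as err:
    print(err)  # Table HybridTable requires a primary key.
```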
/src/snowflake/sqlalchemy/sql/custom_schema/dynamic_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import typing 6 | from typing import Any, Union 7 | 8 | from sqlalchemy.sql.schema import MetaData, SchemaItem 9 | 10 | from .custom_table_prefix import CustomTablePrefix 11 | from .options import ( 12 | IdentifierOption, 13 | IdentifierOptionType, 14 | KeywordOptionType, 15 | TableOptionKey, 16 | TargetLagOption, 17 | TargetLagOptionType, 18 | ) 19 | from .options.keyword_option import KeywordOption 20 | from .table_from_query import TableFromQueryBase 21 | 22 | 23 | class DynamicTable(TableFromQueryBase): 24 | """ 25 | A class representing a dynamic table with configurable options and settings. 26 | 27 | The `DynamicTable` class allows for the creation and querying of tables with 28 | specific options, such as `Warehouse` and `TargetLag`. 29 | 30 | While it does not support reflection at this time, it provides a flexible 31 | interface for creating and managing dynamic tables. 32 | 33 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table 34 | 35 | Example using option values: 36 | DynamicTable( 37 | "dynamic_test_table_1", 38 | metadata, 39 | Column("id", Integer), 40 | Column("name", String), 41 | target_lag=(1, TimeUnit.HOURS), 42 | warehouse='warehouse_name', 43 | refresh_mode=SnowflakeKeyword.AUTO, 44 | as_query="SELECT id, name from test_table_1;" 45 | ) 46 | 47 | Example using explicit options: 48 | DynamicTable( 49 | "dynamic_test_table_1", 50 | metadata, 51 | Column("id", Integer), 52 | Column("name", String), 53 | target_lag=TargetLagOption(1, TimeUnit.HOURS), 54 | warehouse=IdentifierOption('warehouse_name'), 55 | refresh_mode=KeywordOption(SnowflakeKeyword.AUTO), 56 | as_query=AsQueryOption("SELECT id, name from test_table_1;") 57 | ) 58 | """ 59 | 60 | __table_prefixes__ = [CustomTablePrefix.DYNAMIC] 61 | _support_primary_and_foreign_keys = False 62 | _required_parameters = [ 63 | TableOptionKey.WAREHOUSE, 64 | TableOptionKey.AS_QUERY, 65 | TableOptionKey.TARGET_LAG, 66 | ] 67 | 68 | @property 69 | def warehouse(self) -> typing.Optional[IdentifierOption]: 70 | return self._get_dialect_option(TableOptionKey.WAREHOUSE) 71 | 72 | @property 73 | def target_lag(self) -> typing.Optional[TargetLagOption]: 74 | return self._get_dialect_option(TableOptionKey.TARGET_LAG) 75 | 76 | def __init__( 77 | self, 78 | name: str, 79 | metadata: MetaData, 80 | *args: SchemaItem, 81 | warehouse: IdentifierOptionType = None, 82 | target_lag: Union[TargetLagOptionType, KeywordOptionType] = None, 83 | refresh_mode: KeywordOptionType = None, 84 | **kw: Any, 85 | ) -> None: 86 | if kw.get("_no_init", True): 87 | return 88 | 89 | options = [ 90 | IdentifierOption.create(TableOptionKey.WAREHOUSE, warehouse), 91 | TargetLagOption.create(target_lag), 92 | KeywordOption.create(TableOptionKey.REFRESH_MODE, refresh_mode), 93 | ] 94 | 95 | kw.update(self._as_dialect_options(options)) 96 | super().__init__(name, metadata, *args, **kw) 97 | 98 | def _init( 99 | self, 100 | name: str, 101 | metadata: MetaData, 102 | *args: SchemaItem, 103 | **kw: Any, 104 | ) -> None: 105 | self.__init__(name, metadata, *args, _no_init=False, **kw) 106 | 107 | def __repr__(self) -> str: 108 | return "DynamicTable(%s)" % ", ".join( 109 | [repr(self.name)] 110 | + [repr(self.metadata)] 111 | + [repr(x) for x in self.columns] 112 | + [repr(self.target_lag)] 113 | + [repr(self.warehouse)] 114 | + [repr(self.cluster_by)] 115 | + [repr(self.as_query)] 116 | + [f"{k}={repr(getattr(self, k))}" for k in ["schema"]] 117 | ) 118 | --------------------------------------------------------------------------------
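A sketch, mirroring the strings in the `test_compile_dynamic_table` snapshots further below, of compiling a `DynamicTable` to DDL without a live connection:

```
from sqlalchemy import Column, Integer, MetaData, String
from sqlalchemy.schema import CreateTable

from snowflake.sqlalchemy import snowdialect
from snowflake.sqlalchemy.sql.custom_schema import DynamicTable
from snowflake.sqlalchemy.sql.custom_schema.options import TimeUnit

metadata = MetaData()
table = DynamicTable(
    "test_dynamic_table",
    metadata,
    Column("id", Integer),
    Column("name", String),
    target_lag=(10, TimeUnit.SECONDS),
    warehouse="warehouse",
    as_query="SELECT * FROM table",
)
print(CreateTable(table).compile(dialect=snowdialect.SnowflakeDialect()))
# CREATE DYNAMIC TABLE test_dynamic_table (id INTEGER, name VARCHAR)
#   WAREHOUSE = warehouse TARGET_LAG = '10 seconds' AS SELECT * FROM table
```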
/src/snowflake/sqlalchemy/sql/custom_schema/hybrid_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from typing import Any 6 | 7 | from sqlalchemy.sql.schema import MetaData, SchemaItem 8 | 9 | from .custom_table_base import CustomTableBase 10 | from .custom_table_prefix import CustomTablePrefix 11 | 12 | 13 | class HybridTable(CustomTableBase): 14 | """ 15 | A class representing a hybrid table with configurable options and settings. 16 | 17 | The `HybridTable` class allows for the creation and querying of OLTP Snowflake tables. 18 | 19 | While it does not support reflection at this time, it provides a flexible 20 | interface for creating and managing hybrid tables. 21 | 22 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-hybrid-table 23 | 24 | Example usage: 25 | HybridTable( 26 | table_name, 27 | metadata, 28 | Column("id", Integer, primary_key=True), 29 | Column("name", String) 30 | ) 31 | """ 32 | 33 | __table_prefixes__ = [CustomTablePrefix.HYBRID] 34 | _enforce_primary_keys: bool = True 35 | _support_structured_types = True 36 | 37 | def __init__( 38 | self, 39 | name: str, 40 | metadata: MetaData, 41 | *args: SchemaItem, 42 | **kw: Any, 43 | ) -> None: 44 | if kw.get("_no_init", True): 45 | return 46 | super().__init__(name, metadata, *args, **kw) 47 | 48 | def _init( 49 | self, 50 | name: str, 51 | metadata: MetaData, 52 | *args: SchemaItem, 53 | **kw: Any, 54 | ) -> None: 55 | self.__init__(name, metadata, *args, _no_init=False, **kw) 56 | 57 | def __repr__(self) -> str: 58 | return "HybridTable(%s)" % ", ".join( 59 | [repr(self.name)] 60 | + [repr(self.metadata)] 61 | + [repr(x) for x in self.columns] 62 | + [f"{k}={repr(getattr(self, k))}" for k in ["schema"]] 63 | ) 64 | --------------------------------------------------------------------------------
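A sketch of backing an ORM mapping with a `HybridTable` through `__table__`; the class and column names are invented:

```
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

from snowflake.sqlalchemy.sql.custom_schema import HybridTable

Base = declarative_base()

class User(Base):
    __table__ = HybridTable(
        "user_hb",
        Base.metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String),
    )
```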
/src/snowflake/sqlalchemy/sql/custom_schema/iceberg_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import typing 6 | from typing import Any 7 | 8 | from sqlalchemy.sql.schema import MetaData, SchemaItem 9 | 10 | from .custom_table_prefix import CustomTablePrefix 11 | from .options import LiteralOption, LiteralOptionType, TableOptionKey 12 | from .table_from_query import TableFromQueryBase 13 | 14 | 15 | class IcebergTable(TableFromQueryBase): 16 | """ 17 | A class representing an iceberg table with configurable options and settings. 18 | 19 | While it does not support reflection at this time, it provides a flexible 20 | interface for creating and managing iceberg tables. 21 | 22 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table 23 | 24 | Example using option values: 25 | 26 | IcebergTable( 27 | "iceberg_test_table_1", 28 | metadata, 29 | Column("id", Integer), 30 | Column("name", String), 31 | external_volume='my_external_volume', 32 | base_location='my_iceberg_table' 33 | ) 34 | 35 | Example using explicit options: 36 | IcebergTable( 37 | "iceberg_test_table_1", 38 | metadata, 39 | Column("id", Integer), 40 | Column("name", String), 41 | external_volume=LiteralOption('my_external_volume'), 42 | base_location=LiteralOption('my_iceberg_table') 43 | ) 44 | """ 45 | 46 | __table_prefixes__ = [CustomTablePrefix.ICEBERG] 47 | _support_structured_types = True 48 | 49 | @property 50 | def external_volume(self) -> typing.Optional[LiteralOption]: 51 | return self._get_dialect_option(TableOptionKey.EXTERNAL_VOLUME) 52 | 53 | @property 54 | def base_location(self) -> typing.Optional[LiteralOption]: 55 | return self._get_dialect_option(TableOptionKey.BASE_LOCATION) 56 | 57 | @property 58 | def catalog(self) -> typing.Optional[LiteralOption]: 59 | return self._get_dialect_option(TableOptionKey.CATALOG) 60 | 61 | def __init__( 62 | self, 63 | name: str, 64 | metadata: MetaData, 65 | *args: SchemaItem, 66 | external_volume: LiteralOptionType = None, 67 | base_location: LiteralOptionType = None, 68 | **kw: Any, 69 | ) -> None: 70 | if kw.get("_no_init", True): 71 | return 72 | 73 | options = [ 74 | LiteralOption.create(TableOptionKey.EXTERNAL_VOLUME, external_volume), 75 | LiteralOption.create(TableOptionKey.BASE_LOCATION, base_location), 76 | LiteralOption.create(TableOptionKey.CATALOG, "SNOWFLAKE"), 77 | ] 78 | 79 | kw.update(self._as_dialect_options(options)) 80 | super().__init__(name, metadata, *args, **kw) 81 | 82 | def _init( 83 | self, 84 | name: str, 85 | metadata: MetaData, 86 | *args: SchemaItem, 87 | **kw: Any, 88 | ) -> None: 89 | self.__init__(name, metadata, *args, _no_init=False, **kw) 90 | 91 | def __repr__(self) -> str: 92 | return "IcebergTable(%s)" % ", ".join( 93 | [repr(self.name)] 94 | + [repr(self.metadata)] 95 | + [repr(x) for x in self.columns] 96 | + [repr(self.external_volume)] 97 | + [repr(self.base_location)] 98 | + [repr(self.catalog)] 99 | + [repr(self.cluster_by)] 100 | + [repr(self.as_query)] 101 | + [f"{k}={repr(getattr(self, k))}" for k in ["schema"]] 102 | ) 103 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved.
3 | # 4 | 5 | from .as_query_option import AsQueryOption, AsQueryOptionType 6 | from .cluster_by_option import ClusterByOption, ClusterByOptionType 7 | from .identifier_option import IdentifierOption, IdentifierOptionType 8 | from .keyword_option import KeywordOption, KeywordOptionType 9 | from .keywords import SnowflakeKeyword 10 | from .literal_option import LiteralOption, LiteralOptionType 11 | from .table_option import TableOptionKey 12 | from .target_lag_option import TargetLagOption, TargetLagOptionType, TimeUnit 13 | 14 | __all__ = [ 15 | # Options 16 | "IdentifierOption", 17 | "LiteralOption", 18 | "KeywordOption", 19 | "AsQueryOption", 20 | "TargetLagOption", 21 | "ClusterByOption", 22 | # Enums 23 | "TimeUnit", 24 | "SnowflakeKeyword", 25 | "TableOptionKey", 26 | # Types 27 | "IdentifierOptionType", 28 | "LiteralOptionType", 29 | "AsQueryOptionType", 30 | "TargetLagOptionType", 31 | "KeywordOptionType", 32 | "ClusterByOptionType", 33 | ] 34 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/as_query_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from typing import Optional, Union 5 | 6 | from sqlalchemy.sql import Selectable 7 | 8 | from snowflake.sqlalchemy.custom_commands import NoneType 9 | 10 | from .table_option import Priority, TableOption, TableOptionKey 11 | 12 | 13 | class AsQueryOption(TableOption): 14 | """Class to represent an AS clause in tables. 15 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-table#create-table-as-select-also-referred-to-as-ctas 16 | 17 | Example: 18 | as_query=AsQueryOption('select name, address from existing_table where name = "test"') 19 | 20 | is equivalent to: 21 | 22 | as select name, address from existing_table where name = "test" 23 | """ 24 | 25 | def __init__(self, query: Union[str, Selectable]) -> None: 26 | super().__init__() 27 | self._name: TableOptionKey = TableOptionKey.AS_QUERY 28 | self.query = query 29 | 30 | @staticmethod 31 | def create( 32 | value: Optional[Union["AsQueryOption", str, Selectable]] 33 | ) -> "TableOption": 34 | if isinstance(value, (NoneType, AsQueryOption)): 35 | return value 36 | if isinstance(value, (str, Selectable)): 37 | return AsQueryOption(value) 38 | return TableOption._get_invalid_table_option( 39 | TableOptionKey.AS_QUERY, 40 | str(type(value).__name__), 41 | [AsQueryOption.__name__, str.__name__, Selectable.__name__], 42 | ) 43 | 44 | def template(self) -> str: 45 | return "AS %s" 46 | 47 | @property 48 | def priority(self) -> Priority: 49 | return Priority.LOWEST 50 | 51 | def __get_expression(self): 52 | if isinstance(self.query, Selectable): 53 | return self.query.compile(compile_kwargs={"literal_binds": True}) 54 | return self.query 55 | 56 | def _render(self, compiler) -> str: 57 | return self.template() % (self.__get_expression()) 58 | 59 | def __repr__(self) -> str: 60 | return "AsQueryOption(%s)" % self.__get_expression() 61 | 62 | 63 | AsQueryOptionType = Union[AsQueryOption, str, Selectable] 64 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/cluster_by_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. 
All rights reserved. 3 | # 4 | from typing import List, Union 5 | 6 | from sqlalchemy.sql.expression import TextClause 7 | 8 | from snowflake.sqlalchemy.custom_commands import NoneType 9 | 10 | from .table_option import Priority, TableOption, TableOptionKey 11 | 12 | 13 | class ClusterByOption(TableOption): 14 | """Class to represent the cluster by clause in tables. 15 | For further information on this clause, please refer to: https://docs.snowflake.com/en/user-guide/tables-clustering-keys 16 | Example: 17 | cluster_by=ClusterByOption('name', text('id > 0')) 18 | 19 | is equivalent to: 20 | 21 | cluster by (name, id > 0) 22 | """ 23 | 24 | def __init__(self, *expressions: Union[str, TextClause]) -> None: 25 | super().__init__() 26 | self._name: TableOptionKey = TableOptionKey.CLUSTER_BY 27 | self.expressions = expressions 28 | 29 | @staticmethod 30 | def create(value: "ClusterByOptionType") -> "TableOption": 31 | if isinstance(value, (NoneType, ClusterByOption)): 32 | return value 33 | if isinstance(value, List): 34 | return ClusterByOption(*value) 35 | return TableOption._get_invalid_table_option( 36 | TableOptionKey.CLUSTER_BY, 37 | str(type(value).__name__), 38 | [ClusterByOption.__name__, list.__name__], 39 | ) 40 | 41 | def template(self) -> str: 42 | return f"{self.option_name.upper()} (%s)" 43 | 44 | @property 45 | def priority(self) -> Priority: 46 | return Priority.HIGH 47 | 48 | def __get_expression(self): 49 | return ", ".join([str(expression) for expression in self.expressions]) 50 | 51 | def _render(self, compiler) -> str: 52 | return self.template() % (self.__get_expression()) 53 | 54 | def __repr__(self) -> str: 55 | return "ClusterByOption(%s)" % self.__get_expression() 56 | 57 | 58 | ClusterByOptionType = Union[ClusterByOption, List[Union[str, TextClause]]] 59 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/identifier_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from typing import Optional, Union 5 | 6 | from snowflake.sqlalchemy.custom_commands import NoneType 7 | 8 | from .table_option import Priority, TableOption, TableOptionKey 9 | 10 | 11 | class IdentifierOption(TableOption): 12 | """Class to represent an identifier option in Snowflake Tables. 
13 | 14 | Example: 15 | warehouse = IdentifierOption('my_warehouse') 16 | 17 | is equivalent to: 18 | 19 | WAREHOUSE = my_warehouse 20 | """ 21 | 22 | def __init__(self, value: str) -> None: 23 | super().__init__() 24 | self.value: str = value 25 | 26 | @property 27 | def priority(self): 28 | return Priority.HIGH 29 | 30 | @staticmethod 31 | def create( 32 | name: TableOptionKey, value: Optional[Union[str, "IdentifierOption"]] 33 | ) -> Optional[TableOption]: 34 | if isinstance(value, NoneType): 35 | return None 36 | 37 | if isinstance(value, str): 38 | value = IdentifierOption(value) 39 | 40 | if isinstance(value, IdentifierOption): 41 | value._set_option_name(name) 42 | return value 43 | 44 | return TableOption._get_invalid_table_option( 45 | name, str(type(value).__name__), [IdentifierOption.__name__, str.__name__] 46 | ) 47 | 48 | def template(self) -> str: 49 | return f"{self.option_name.upper()} = %s" 50 | 51 | def _render(self, compiler) -> str: 52 | return self.template() % self.value 53 | 54 | def __repr__(self) -> str: 55 | option_name = ( 56 | f", table_option_key={self.option_name}" 57 | if not isinstance(self.option_name, NoneType) 58 | else "" 59 | ) 60 | return f"IdentifierOption(value='{self.value}'{option_name})" 61 | 62 | 63 | IdentifierOptionType = Union[IdentifierOption, str] 64 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/invalid_table_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from typing import Optional 5 | 6 | from .table_option import TableOption, TableOptionKey 7 | 8 | 9 | class InvalidTableOption(TableOption): 10 | """Class to store errors and raise them after table initialization in order to avoid recursion error.""" 11 | 12 | def __init__(self, name: TableOptionKey, value: Exception) -> None: 13 | super().__init__() 14 | self.exception: Exception = value 15 | self._name = name 16 | 17 | @staticmethod 18 | def create(name: TableOptionKey, value: Exception) -> Optional[TableOption]: 19 | return InvalidTableOption(name, value) 20 | 21 | def _render(self, compiler) -> str: 22 | raise self.exception 23 | 24 | def __repr__(self) -> str: 25 | return f"InvalidTableOption(value='{self.exception}')" 26 | --------------------------------------------------------------------------------
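A sketch contrasting `IdentifierOption` (above) with `LiteralOption` (defined further below): identifiers render bare, while string literals render quoted. `_render` is private and ignores its compiler argument in both classes; it is called with `None` here purely for illustration:

```
from snowflake.sqlalchemy.sql.custom_schema.options import (
    IdentifierOption,
    LiteralOption,
    TableOptionKey,
)

ident = IdentifierOption.create(TableOptionKey.WAREHOUSE, "my_wh")
lit = LiteralOption.create(TableOptionKey.EXTERNAL_VOLUME, "my_vol")

print(ident._render(None))  # WAREHOUSE = my_wh
print(lit._render(None))    # EXTERNAL_VOLUME = 'my_vol'
```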
/src/snowflake/sqlalchemy/sql/custom_schema/options/keyword_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from typing import Optional, Union 5 | 6 | from snowflake.sqlalchemy.custom_commands import NoneType 7 | 8 | from .keywords import SnowflakeKeyword 9 | from .table_option import Priority, TableOption, TableOptionKey 10 | 11 | 12 | class KeywordOption(TableOption): 13 | """Class to represent a keyword option in Snowflake Tables. 14 | 15 | Example: 16 | target_lag = KeywordOption(SnowflakeKeyword.DOWNSTREAM) 17 | 18 | is equivalent to: 19 | 20 | TARGET_LAG = DOWNSTREAM 21 | """ 22 | 23 | def __init__(self, value: SnowflakeKeyword) -> None: 24 | super().__init__() 25 | self.value: str = value.value 26 | 27 | @property 28 | def priority(self): 29 | return Priority.HIGH 30 | 31 | def template(self) -> str: 32 | return f"{self.option_name.upper()} = %s" 33 | 34 | def _render(self, compiler) -> str: 35 | return self.template() % self.value.upper() 36 | 37 | @staticmethod 38 | def create( 39 | name: TableOptionKey, value: Optional[Union[SnowflakeKeyword, "KeywordOption"]] 40 | ) -> Optional[TableOption]: 41 | if isinstance(value, NoneType): 42 | return value 43 | if isinstance(value, SnowflakeKeyword): 44 | value = KeywordOption(value) 45 | 46 | if isinstance(value, KeywordOption): 47 | value._set_option_name(name) 48 | return value 49 | 50 | return TableOption._get_invalid_table_option( 51 | name, 52 | str(type(value).__name__), 53 | [KeywordOption.__name__, SnowflakeKeyword.__name__], 54 | ) 55 | 56 | def __repr__(self) -> str: 57 | option_name = ( 58 | f", table_option_key={self.option_name}" 59 | if not isinstance(self.option_name, NoneType) 60 | else "" 61 | ) 62 | return f"KeywordOption(value='{self.value}'{option_name})" 63 | 64 | 65 | KeywordOptionType = Union[KeywordOption, SnowflakeKeyword] 66 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/keywords.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | from enum import Enum 5 | 6 | 7 | class SnowflakeKeyword(Enum): 8 | # TARGET_LAG 9 | DOWNSTREAM = "DOWNSTREAM" 10 | 11 | # REFRESH_MODE 12 | AUTO = "AUTO" 13 | FULL = "FULL" 14 | INCREMENTAL = "INCREMENTAL" 15 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/literal_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from typing import Any, Optional, Union 5 | 6 | from snowflake.sqlalchemy.custom_commands import NoneType 7 | 8 | from .table_option import Priority, TableOption, TableOptionKey 9 | 10 | 11 | class LiteralOption(TableOption): 12 | """Class to represent a literal option in Snowflake Tables.
13 | 14 | Example: 15 | warehouse = LiteralOption('my_warehouse') 16 | 17 | is equivalent to: 18 | 19 | WAREHOUSE = 'my_warehouse' 20 | """ 21 | 22 | def __init__(self, value: Union[int, str]) -> None: 23 | super().__init__() 24 | self.value: Any = value 25 | 26 | @property 27 | def priority(self): 28 | return Priority.HIGH 29 | 30 | @staticmethod 31 | def create( 32 | name: TableOptionKey, value: Optional[Union[str, int, "LiteralOption"]] 33 | ) -> Optional[TableOption]: 34 | if isinstance(value, NoneType): 35 | return None 36 | if isinstance(value, (str, int)): 37 | value = LiteralOption(value) 38 | 39 | if isinstance(value, LiteralOption): 40 | value._set_option_name(name) 41 | return value 42 | 43 | return TableOption._get_invalid_table_option( 44 | name, 45 | str(type(value).__name__), 46 | [LiteralOption.__name__, str.__name__, int.__name__], 47 | ) 48 | 49 | def template(self) -> str: 50 | if isinstance(self.value, int): 51 | return f"{self.option_name.upper()} = %d" 52 | else: 53 | return f"{self.option_name.upper()} = '%s'" 54 | 55 | def _render(self, compiler) -> str: 56 | return self.template() % self.value 57 | 58 | def __repr__(self) -> str: 59 | option_name = ( 60 | f", table_option_key={self.option_name}" 61 | if not isinstance(self.option_name, NoneType) 62 | else "" 63 | ) 64 | return f"LiteralOption(value='{self.value}'{option_name})" 65 | 66 | 67 | LiteralOptionType = Union[LiteralOption, str, int] 68 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/table_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from enum import Enum 5 | from typing import List, Optional 6 | 7 | from snowflake.sqlalchemy import exc 8 | from snowflake.sqlalchemy.custom_commands import NoneType 9 | 10 | 11 | class Priority(Enum): 12 | LOWEST = 0 13 | VERY_LOW = 1 14 | LOW = 2 15 | MEDIUM = 4 16 | HIGH = 6 17 | VERY_HIGH = 7 18 | HIGHEST = 8 19 | 20 | 21 | class TableOption: 22 | 23 | def __init__(self) -> None: 24 | self._name: Optional[TableOptionKey] = None 25 | 26 | @property 27 | def option_name(self) -> str: 28 | if isinstance(self._name, NoneType): 29 | return None 30 | return str(self._name.value) 31 | 32 | def _set_option_name(self, name: Optional["TableOptionKey"]): 33 | self._name = name 34 | 35 | @property 36 | def priority(self) -> Priority: 37 | return Priority.MEDIUM 38 | 39 | @staticmethod 40 | def create(**kwargs) -> "TableOption": 41 | raise NotImplementedError 42 | 43 | @staticmethod 44 | def _get_invalid_table_option( 45 | parameter_name: "TableOptionKey", input_type: str, expected_types: List[str] 46 | ) -> "TableOption": 47 | from .invalid_table_option import InvalidTableOption 48 | 49 | return InvalidTableOption( 50 | parameter_name, 51 | exc.InvalidTableParameterTypeError( 52 | parameter_name.value, input_type, expected_types 53 | ), 54 | ) 55 | 56 | def _validate_option(self): 57 | if isinstance(self.option_name, NoneType): 58 | raise exc.OptionKeyNotProvidedError(self.__class__.__name__) 59 | 60 | def template(self) -> str: 61 | return f"{self.option_name.upper()} = %s" 62 | 63 | def render_option(self, compiler) -> str: 64 | self._validate_option() 65 | return self._render(compiler) 66 | 67 | def _render(self, compiler) -> str: 68 | raise NotImplementedError 69 | 70 | 71 | class TableOptionKey(Enum): 72 | AS_QUERY = "as_query" 73 | BASE_LOCATION = 
"base_location" 74 | CATALOG = "catalog" 75 | CATALOG_SYNC = "catalog_sync" 76 | CLUSTER_BY = "cluster by" 77 | DATA_RETENTION_TIME_IN_DAYS = "data_retention_time_in_days" 78 | DEFAULT_DDL_COLLATION = "default_ddl_collation" 79 | EXTERNAL_VOLUME = "external_volume" 80 | MAX_DATA_EXTENSION_TIME_IN_DAYS = "max_data_extension_time_in_days" 81 | REFRESH_MODE = "refresh_mode" 82 | STORAGE_SERIALIZATION_POLICY = "storage_serialization_policy" 83 | TARGET_LAG = "target_lag" 84 | WAREHOUSE = "warehouse" 85 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/options/target_lag_option.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # from enum import Enum 4 | from enum import Enum 5 | from typing import Optional, Tuple, Union 6 | 7 | from snowflake.sqlalchemy.custom_commands import NoneType 8 | 9 | from .keyword_option import KeywordOption, KeywordOptionType 10 | from .keywords import SnowflakeKeyword 11 | from .table_option import Priority, TableOption, TableOptionKey 12 | 13 | 14 | class TimeUnit(Enum): 15 | SECONDS = "seconds" 16 | MINUTES = "minutes" 17 | HOURS = "hours" 18 | DAYS = "days" 19 | 20 | 21 | class TargetLagOption(TableOption): 22 | """Class to represent the target lag clause in Dynamic Tables. 23 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table 24 | 25 | Example using the time and unit parameters: 26 | 27 | target_lag = TargetLagOption(10, TimeUnit.SECONDS) 28 | 29 | is equivalent to: 30 | 31 | TARGET_LAG = '10 SECONDS' 32 | 33 | Example using keyword parameter: 34 | 35 | target_lag = KeywordOption(SnowflakeKeyword.DOWNSTREAM) 36 | 37 | is equivalent to: 38 | 39 | TARGET_LAG = DOWNSTREAM 40 | 41 | """ 42 | 43 | def __init__( 44 | self, 45 | time: Optional[int] = 0, 46 | unit: Optional[TimeUnit] = TimeUnit.MINUTES, 47 | ) -> None: 48 | super().__init__() 49 | self.time = time 50 | self.unit = unit 51 | self._name: TableOptionKey = TableOptionKey.TARGET_LAG 52 | 53 | @staticmethod 54 | def create( 55 | value: Union["TargetLagOption", Tuple[int, TimeUnit], KeywordOptionType] 56 | ) -> Optional[TableOption]: 57 | if isinstance(value, NoneType): 58 | return value 59 | 60 | if isinstance(value, Tuple): 61 | time, unit = value 62 | value = TargetLagOption(time, unit) 63 | 64 | if isinstance(value, TargetLagOption): 65 | return value 66 | 67 | if isinstance(value, (KeywordOption, SnowflakeKeyword)): 68 | return KeywordOption.create(TableOptionKey.TARGET_LAG, value) 69 | 70 | return TableOption._get_invalid_table_option( 71 | TableOptionKey.TARGET_LAG, 72 | str(type(value).__name__), 73 | [ 74 | TargetLagOption.__name__, 75 | f"Tuple[int, {TimeUnit.__name__}])", 76 | SnowflakeKeyword.__name__, 77 | ], 78 | ) 79 | 80 | def __get_expression(self): 81 | return f"'{str(self.time)} {str(self.unit.value)}'" 82 | 83 | @property 84 | def priority(self) -> Priority: 85 | return Priority.HIGH 86 | 87 | def _render(self, compiler) -> str: 88 | return self.template() % (self.__get_expression()) 89 | 90 | def __repr__(self) -> str: 91 | return "TargetLagOption(%s)" % self.__get_expression() 92 | 93 | 94 | TargetLagOptionType = Union[TargetLagOption, Tuple[int, TimeUnit]] 95 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/snowflake_table.py: 
-------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from typing import Any 6 | 7 | from sqlalchemy.sql.schema import MetaData, SchemaItem 8 | 9 | from .table_from_query import TableFromQueryBase 10 | 11 | 12 | class SnowflakeTable(TableFromQueryBase): 13 | """ 14 | A class representing a table in Snowflake with configurable options and settings. 15 | 16 | While it does not support reflection at this time, it provides a flexible 17 | interface for creating and managing tables. 18 | 19 | For further information on this clause, please refer to: https://docs.snowflake.com/en/sql-reference/sql/create-table 20 | Example usage: 21 | 22 | SnowflakeTable( 23 | table_name, 24 | metadata, 25 | Column("id", Integer, primary_key=True), 26 | Column("name", String), 27 | cluster_by = ["id", text("name > 5")] 28 | ) 29 | 30 | Example using explicit options: 31 | 32 | SnowflakeTable( 33 | table_name, 34 | metadata, 35 | Column("id", Integer, primary_key=True), 36 | Column("name", String), 37 | cluster_by = ClusterByOption("id", text("name > 5")) 38 | ) 39 | 40 | """ 41 | 42 | def __init__( 43 | self, 44 | name: str, 45 | metadata: MetaData, 46 | *args: SchemaItem, 47 | **kw: Any, 48 | ) -> None: 49 | if kw.get("_no_init", True): 50 | return 51 | super().__init__(name, metadata, *args, **kw) 52 | 53 | def _init( 54 | self, 55 | name: str, 56 | metadata: MetaData, 57 | *args: SchemaItem, 58 | **kw: Any, 59 | ) -> None: 60 | self.__init__(name, metadata, *args, _no_init=False, **kw) 61 | 62 | def __repr__(self) -> str: 63 | return "SnowflakeTable(%s)" % ", ".join( 64 | [repr(self.name)] 65 | + [repr(self.metadata)] 66 | + [repr(x) for x in self.columns] 67 | + [repr(self.cluster_by)] 68 | + [repr(self.as_query)] 69 | + [f"{k}={repr(getattr(self, k))}" for k in ["schema"]] 70 | ) 71 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/sql/custom_schema/table_from_query.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved.
3 | # 4 | import typing 5 | from typing import Any, Optional 6 | 7 | from sqlalchemy.sql import Selectable 8 | from sqlalchemy.sql.schema import Column, MetaData, SchemaItem 9 | 10 | from .clustered_table import ClusteredTableBase 11 | from .options.as_query_option import AsQueryOption, AsQueryOptionType 12 | from .options.table_option import TableOptionKey 13 | 14 | 15 | class TableFromQueryBase(ClusteredTableBase): 16 | 17 | @property 18 | def as_query(self) -> Optional[AsQueryOption]: 19 | return self._get_dialect_option(TableOptionKey.AS_QUERY) 20 | 21 | def __init__( 22 | self, 23 | name: str, 24 | metadata: MetaData, 25 | *args: SchemaItem, 26 | as_query: AsQueryOptionType = None, 27 | **kw: Any, 28 | ) -> None: 29 | items = [item for item in args] 30 | as_query = AsQueryOption.create(as_query) # noqa 31 | kw.update(self._as_dialect_options([as_query])) 32 | if ( 33 | isinstance(as_query, AsQueryOption) 34 | and isinstance(as_query.query, Selectable) 35 | and not self.__has_defined_columns(items) 36 | ): 37 | columns = self.__create_columns_from_selectable(as_query.query) 38 | args = items + columns 39 | super().__init__(name, metadata, *args, **kw) 40 | 41 | def __has_defined_columns(self, items: typing.List[SchemaItem]) -> bool: 42 | for item in items: 43 | if isinstance(item, Column): 44 | return True 45 | 46 | def __create_columns_from_selectable( 47 | self, selectable: Selectable 48 | ) -> Optional[typing.List[Column]]: 49 | if not isinstance(selectable, Selectable): 50 | return 51 | columns: typing.List[Column] = [] 52 | for _, c in selectable.exported_columns.items(): 53 | columns += [Column(c.name, c.type)] 54 | return columns 55 | -------------------------------------------------------------------------------- /src/snowflake/sqlalchemy/version.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | # Update this for new versions 5 | # Don't change the fourth version number from None 6 | VERSION = "1.7.3" 7 | -------------------------------------------------------------------------------- /tested_requirements/requirements_310.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.10.5 2 | asn1crypto==1.5.1 3 | certifi==2022.6.15 4 | cffi==1.15.1 5 | charset-normalizer==2.0.12 6 | cryptography==36.0.2 7 | greenlet==1.1.2 8 | idna==3.3 9 | oscrypto==1.3.0 10 | pycparser==2.21 11 | pycryptodomex==3.15.0 12 | PyJWT==2.4.0 13 | pyOpenSSL==22.0.0 14 | pytz==2022.1 15 | requests==2.28.1 16 | snowflake-connector-python==2.7.9 17 | SQLAlchemy==1.4.39 18 | urllib3==1.26.9 19 | snowflake-sqlalchemy==1.3.4 20 | -------------------------------------------------------------------------------- /tested_requirements/requirements_37.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.7.13 2 | asn1crypto==1.5.1 3 | certifi==2022.6.15 4 | cffi==1.15.1 5 | charset-normalizer==2.0.12 6 | cryptography==36.0.2 7 | greenlet==1.1.2 8 | idna==3.3 9 | importlib-metadata==4.12.0 10 | oscrypto==1.3.0 11 | pycparser==2.21 12 | pycryptodomex==3.15.0 13 | PyJWT==2.4.0 14 | pyOpenSSL==22.0.0 15 | pytz==2022.1 16 | requests==2.28.1 17 | snowflake-connector-python==2.7.9 18 | SQLAlchemy==1.4.39 19 | typing_extensions==4.2.0 20 | urllib3==1.26.9 21 | zipp==3.8.0 22 | snowflake-sqlalchemy==1.3.4 23 | -------------------------------------------------------------------------------- /tested_requirements/requirements_38.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.8.13 2 | asn1crypto==1.5.1 3 | certifi==2022.6.15 4 | cffi==1.15.1 5 | charset-normalizer==2.0.12 6 | cryptography==36.0.2 7 | greenlet==1.1.2 8 | idna==3.3 9 | oscrypto==1.3.0 10 | pycparser==2.21 11 | pycryptodomex==3.15.0 12 | PyJWT==2.4.0 13 | pyOpenSSL==22.0.0 14 | pytz==2022.1 15 | requests==2.28.1 16 | snowflake-connector-python==2.7.9 17 | SQLAlchemy==1.4.39 18 | urllib3==1.26.9 19 | snowflake-sqlalchemy==1.3.4 20 | -------------------------------------------------------------------------------- /tested_requirements/requirements_39.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.9.13 2 | asn1crypto==1.5.1 3 | certifi==2022.6.15 4 | cffi==1.15.1 5 | charset-normalizer==2.0.12 6 | cryptography==36.0.2 7 | greenlet==1.1.2 8 | idna==3.3 9 | oscrypto==1.3.0 10 | pycparser==2.21 11 | pycryptodomex==3.15.0 12 | PyJWT==2.4.0 13 | pyOpenSSL==22.0.0 14 | pytz==2022.1 15 | requests==2.28.1 16 | snowflake-connector-python==2.7.9 17 | SQLAlchemy==1.4.39 18 | urllib3==1.26.9 19 | snowflake-sqlalchemy==1.3.4 20 | --------------------------------------------------------------------------------
/tests/README.rst: -------------------------------------------------------------------------------- 1 | Building and Testing Snowflake SQLAlchemy 2 | ******************************************************************************** 3 | 4 | Building 5 | ================================================================================ 6 | 7 | Install Python 3.5.0 or higher. Clone the Snowflake SQLAlchemy repository, then run the following commands to create a wheel package: 8 | 9 | .. code-block:: bash 10 | 11 | git clone git@github.com:snowflakedb/snowflake-sqlalchemy.git 12 | cd snowflake-sqlalchemy 13 | python -m venv /tmp/test_snowflake_sqlalchemy 14 | source /tmp/test_snowflake_sqlalchemy/bin/activate 15 | pip install -U pip setuptools wheel 16 | python setup.py bdist_wheel 17 | 18 | Find the ``snowflake_sqlalchemy*.whl`` package in the ``./dist`` directory. 19 | 20 | 21 | Testing 22 | ================================================================================ 23 | 24 | Create a virtualenv, and create a ``parameters.py`` file in the ``tests`` directory. 25 | 26 | .. code-block:: bash 27 | 28 | python -m venv /tmp/test_snowflake_sqlalchemy 29 | source /tmp/test_snowflake_sqlalchemy/bin/activate 30 | pip install Cython 31 | pip install pytest numpy pandas 32 | pip install dist/snowflake_sqlalchemy*.whl 33 | vim tests/parameters.py 34 | 35 | In the ``parameters.py`` file, include the connection information in a Python dictionary. 36 | 37 | .. code-block:: python 38 | 39 | CONNECTION_PARAMETERS = { 40 | 'account': 'testaccount', 41 | 'user': 'user1', 42 | 'password': 'testpasswd', 43 | 'schema': 'testschema', 44 | 'database': 'testdb', 45 | } 46 | 47 | Run the tests: 48 | 49 | .. code-block:: bash 50 | 51 | py.test tests 52 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | -------------------------------------------------------------------------------- /tests/__snapshots__/test_compile_dynamic_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_dynamic_table 3 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 4 | # --- 5 | # name: test_compile_dynamic_table_orm 6 | "CREATE DYNAMIC TABLE test_dynamic_table_orm (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 7 | # --- 8 | # name: test_compile_dynamic_table_orm_with_str_keys 9 | "CREATE DYNAMIC TABLE test_dynamic_table_orm_2 (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 10 | # --- 11 | # name: test_compile_dynamic_table_with_selectable 12 | "CREATE DYNAMIC TABLE dynamic_test_table_1 (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT test_table_1.id, test_table_1.name FROM test_table_1 WHERE test_table_1.id = 23" 13 | # --- 14 | -------------------------------------------------------------------------------- /tests/__snapshots__/test_core.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_table_with_cluster_by_with_expression 3 | 'CREATE TABLE clustered_user (\t"Id" INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tPRIMARY KEY ("Id")) CLUSTER BY ("Id", name, "Id" > 5)' 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/__snapshots__/test_orm.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_orm_one_to_many_relationship_with_hybrid_table 3 | ProgrammingError('(snowflake.connector.errors.ProgrammingError) 200009 (22000): Foreign key constraint
"SYS_INDEX_HB_TBL_ADDRESS_FOREIGN_KEY_USER_ID_HB_TBL_USER_ID" was violated.') 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/__snapshots__/test_reflect_dynamic_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_dynamic_table 3 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/__snapshots__/test_unit_structured_types.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_map_with_not_null 3 | 'MAP(DECIMAL(10, 0), VARCHAR NOT NULL)' 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/custom_tables/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_compile_dynamic_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_dynamic_table 3 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 4 | # --- 5 | # name: test_compile_dynamic_table_orm 6 | "CREATE DYNAMIC TABLE test_dynamic_table_orm (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT * FROM table" 7 | # --- 8 | # name: test_compile_dynamic_table_orm_with_str_keys 9 | 'CREATE DYNAMIC TABLE "SCHEMA_DB".test_dynamic_table_orm_2 (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = \'10 seconds\'\tAS SELECT * FROM table' 10 | # --- 11 | # name: test_compile_dynamic_table_with_multiple_wrong_option_types 12 | ''' 13 | Invalid parameter type 'IdentifierOption' provided for 'refresh_mode'. Expected one of the following types: 'KeywordOption', 'SnowflakeKeyword'. 14 | Invalid parameter type 'IdentifierOption' provided for 'target_lag'. Expected one of the following types: 'TargetLagOption', 'Tuple[int, TimeUnit])', 'SnowflakeKeyword'. 15 | Invalid parameter type 'KeywordOption' provided for 'as_query'. Expected one of the following types: 'AsQueryOption', 'str', 'Selectable'. 16 | Invalid parameter type 'KeywordOption' provided for 'warehouse'. Expected one of the following types: 'IdentifierOption', 'str'. 17 | 18 | ''' 19 | # --- 20 | # name: test_compile_dynamic_table_with_one_wrong_option_types 21 | ''' 22 | Invalid parameter type 'LiteralOption' provided for 'warehouse'. Expected one of the following types: 'IdentifierOption', 'str'. 
23 | 24 | ''' 25 | # --- 26 | # name: test_compile_dynamic_table_with_options_objects 27 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tREFRESH_MODE = AUTO\tAS SELECT * FROM table" 28 | # --- 29 | # name: test_compile_dynamic_table_with_refresh_mode[SnowflakeKeyword.AUTO] 30 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tREFRESH_MODE = AUTO\tAS SELECT * FROM table" 31 | # --- 32 | # name: test_compile_dynamic_table_with_refresh_mode[SnowflakeKeyword.FULL] 33 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tREFRESH_MODE = FULL\tAS SELECT * FROM table" 34 | # --- 35 | # name: test_compile_dynamic_table_with_refresh_mode[SnowflakeKeyword.INCREMENTAL] 36 | "CREATE DYNAMIC TABLE test_dynamic_table (\tid INTEGER, \tgeom GEOMETRY)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tREFRESH_MODE = INCREMENTAL\tAS SELECT * FROM table" 37 | # --- 38 | # name: test_compile_dynamic_table_with_selectable 39 | "CREATE DYNAMIC TABLE dynamic_test_table_1 (\tid INTEGER, \tname VARCHAR)\tWAREHOUSE = warehouse\tTARGET_LAG = '10 seconds'\tAS SELECT test_table_1.id, test_table_1.name FROM test_table_1 WHERE test_table_1.id = 23" 40 | # --- 41 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_compile_hybrid_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_hybrid_table 3 | 'CREATE HYBRID TABLE test_hybrid_table (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tgeom GEOMETRY, \tPRIMARY KEY (id))' 4 | # --- 5 | # name: test_compile_hybrid_table_orm 6 | 'CREATE HYBRID TABLE test_hybrid_table_orm (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tPRIMARY KEY (id))' 7 | # --- 8 | # name: test_compile_hybrid_table_with_array 9 | 'CREATE HYBRID TABLE test_hybrid_table (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tgeom GEOMETRY, \tarray ARRAY, \tPRIMARY KEY (id))' 10 | # --- 11 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_compile_iceberg_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_dynamic_table_orm_with_as_query 3 | "CREATE ICEBERG TABLE test_iceberg_table_orm_2 (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tPRIMARY KEY (id))\tEXTERNAL_VOLUME = 'my_external_volume'\tCATALOG = 'SNOWFLAKE'\tBASE_LOCATION = 'my_iceberg_table'\tAS SELECT * FROM table" 4 | # --- 5 | # name: test_compile_icberg_table_with_primary_key 6 | "CREATE ICEBERG TABLE test_iceberg_table_with_options (\tid INTEGER NOT NULL AUTOINCREMENT, \tgeom VARCHAR, \tPRIMARY KEY (id))\tEXTERNAL_VOLUME = 'my_external_volume'\tCATALOG = 'SNOWFLAKE'\tBASE_LOCATION = 'my_iceberg_table'" 7 | # --- 8 | # name: test_compile_iceberg_table 9 | "CREATE ICEBERG TABLE test_iceberg_table (\tid INTEGER, \tgeom VARCHAR)\tEXTERNAL_VOLUME = 'my_external_volume'\tCATALOG = 'SNOWFLAKE'\tBASE_LOCATION = 'my_iceberg_table'" 10 | # --- 11 | # name: test_compile_iceberg_table_with_one_wrong_option_types 12 | ''' 13 | Invalid parameter type 'IdentifierOption' provided for 'external_volume'. Expected one of the following types: 'LiteralOption', 'str', 'int'. 
14 | 15 | ''' 16 | # --- 17 | # name: test_compile_iceberg_table_with_options_objects 18 | "CREATE ICEBERG TABLE test_iceberg_table_with_options (\tid INTEGER, \tgeom VARCHAR)\tEXTERNAL_VOLUME = 'my_external_volume'\tCATALOG = 'SNOWFLAKE'\tBASE_LOCATION = 'my_iceberg_table'" 19 | # --- 20 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_compile_snowflake_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_compile_dynamic_table_orm_with_str_keys 3 | 'CREATE TABLE "SCHEMA_DB".test_snowflake_table_orm_2 (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tPRIMARY KEY (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 4 | # --- 5 | # name: test_compile_dynamic_table_with_foreign_key 6 | 'CREATE TABLE test_table_2 (\tid INTEGER NOT NULL, \tgeom VARCHAR, \tPRIMARY KEY (id), \tFOREIGN KEY(id) REFERENCES "table" (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 7 | # --- 8 | # name: test_compile_dynamic_table_with_primary_key 9 | 'CREATE TABLE test_table_2 (\tid INTEGER NOT NULL AUTOINCREMENT, \tgeom VARCHAR, \tPRIMARY KEY (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 10 | # --- 11 | # name: test_compile_snowflake_table 12 | 'CREATE TABLE test_table_1 (\tid INTEGER, \tgeom VARCHAR)\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 13 | # --- 14 | # name: test_compile_snowflake_table_orm_with_str_keys 15 | 'CREATE TABLE "SCHEMA_DB".test_snowflake_table_orm_2 (\tid INTEGER NOT NULL AUTOINCREMENT, \tname VARCHAR, \tPRIMARY KEY (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 16 | # --- 17 | # name: test_compile_snowflake_table_with_explicit_options 18 | 'CREATE TABLE test_table_2 (\tid INTEGER, \tgeom VARCHAR)\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 19 | # --- 20 | # name: test_compile_snowflake_table_with_foreign_key 21 | 'CREATE TABLE test_table_2 (\tid INTEGER NOT NULL, \tgeom VARCHAR, \tPRIMARY KEY (id), \tFOREIGN KEY(id) REFERENCES "table" (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 22 | # --- 23 | # name: test_compile_snowflake_table_with_primary_key 24 | 'CREATE TABLE test_table_2 (\tid INTEGER NOT NULL AUTOINCREMENT, \tgeom VARCHAR, \tPRIMARY KEY (id))\tCLUSTER BY (id, id > 100)\tAS SELECT * FROM table' 25 | # --- 26 | # name: test_compile_snowflake_table_with_selectable 27 | 'CREATE TABLE snowflake_test_table_1 (\tid INTEGER, \tgeom VARCHAR)\tAS SELECT test_table_1.id, test_table_1.geom FROM test_table_1 WHERE test_table_1.id = 23' 28 | # --- 29 | # name: test_compile_snowflake_table_with_wrong_option_types 30 | ''' 31 | Invalid parameter type 'AsQueryOption' provided for 'cluster by'. Expected one of the following types: 'ClusterByOption', 'list'. 32 | Invalid parameter type 'ClusterByOption' provided for 'as_query'. Expected one of the following types: 'AsQueryOption', 'str', 'Selectable'. 
33 | 34 | ''' 35 | # --- 36 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_create_dynamic_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_create_dynamic_table_without_dynamictable_and_defined_options 3 | CustomOptionsAreOnlySupportedOnSnowflakeTables('Identifier, Literal, TargetLag and other custom options are only supported on Snowflake tables.') 4 | # --- 5 | # name: test_create_dynamic_table_without_dynamictable_class 6 | UnexpectedOptionTypeError('The following options are either unsupported or should be defined using a Snowflake table: as_query, warehouse.') 7 | # --- 8 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_create_hybrid_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_create_hybrid_table 3 | "[(1, 'test')]" 4 | # --- 5 | # name: test_create_hybrid_table_with_multiple_index 6 | ProgrammingError("(snowflake.connector.errors.ProgrammingError) 391480 (0A000): Another index is being built on table 'TEST_HYBRID_TABLE_WITH_MULTIPLE_INDEX'. Only one index can be built at a time. Either cancel the other index creation or wait until it is complete.") 7 | # --- 8 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_create_iceberg_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_create_iceberg_table 3 | ''' 4 | (snowflake.connector.errors.ProgrammingError) 091017 (22000): S3 bucket 'my_example_bucket' does not exist or not authorized. 5 | [SQL: 6 | CREATE ICEBERG TABLE "Iceberg_Table_1" ( 7 | id INTEGER NOT NULL AUTOINCREMENT, 8 | geom VARCHAR, 9 | PRIMARY KEY (id) 10 | ) EXTERNAL_VOLUME = 'exvol' CATALOG = 'SNOWFLAKE' BASE_LOCATION = 'my_iceberg_table' 11 | 12 | ] 13 | ''' 14 | # --- 15 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_create_snowflake_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_create_snowflake_table_with_cluster_by 3 | "[(1, 'test')]" 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_generic_options.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_identifier_option_with_wrong_type 3 | InvalidTableParameterTypeError("Invalid parameter type 'int' provided for 'warehouse'. Expected one of the following types: 'IdentifierOption', 'str'.\n") 4 | # --- 5 | # name: test_identifier_option_without_name 6 | OptionKeyNotProvidedError('Expected option key in IdentifierOption option but got NoneType instead.') 7 | # --- 8 | # name: test_invalid_as_query_option 9 | InvalidTableParameterTypeError("Invalid parameter type 'int' provided for 'as_query'. Expected one of the following types: 'AsQueryOption', 'str', 'Selectable'.\n") 10 | # --- 11 | # name: test_literal_option_with_wrong_type 12 | InvalidTableParameterTypeError("Invalid parameter type 'SnowflakeKeyword' provided for 'warehouse'. 
Expected one of the following types: 'LiteralOption', 'str', 'int'.\n") 13 | # --- 14 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_reflect_hybrid_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_simple_reflection_hybrid_table_as_table 3 | 'CREATE TABLE test_hybrid_table_reflection (\tid DECIMAL(38, 0) NOT NULL, \tname VARCHAR(16777216), \tCONSTRAINT demo_name PRIMARY KEY (id))' 4 | # --- 5 | -------------------------------------------------------------------------------- /tests/custom_tables/__snapshots__/test_reflect_snowflake_table.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: test_inspect_snowflake_table 3 | list([ 4 | dict({ 5 | 'autoincrement': False, 6 | 'comment': None, 7 | 'default': None, 8 | 'name': 'id', 9 | 'nullable': False, 10 | 'primary_key': True, 11 | 'type': _CUSTOM_DECIMAL(precision=38, scale=0), 12 | }), 13 | dict({ 14 | 'autoincrement': False, 15 | 'comment': None, 16 | 'default': None, 17 | 'name': 'name', 18 | 'nullable': True, 19 | 'primary_key': False, 20 | 'type': VARCHAR(length=16777216), 21 | }), 22 | ]) 23 | # --- 24 | # name: test_simple_reflection_of_table_as_snowflake_table 25 | 'CREATE TABLE test_snowflake_table_reflection (\tid DECIMAL(38, 0) NOT NULL, \tname VARCHAR(16777216), \tCONSTRAINT demo_name PRIMARY KEY (id))' 26 | # --- 27 | # name: test_simple_reflection_of_table_as_sqlalchemy_table 28 | 'CREATE TABLE test_snowflake_table_reflection (\tid DECIMAL(38, 0) NOT NULL, \tname VARCHAR(16777216), \tCONSTRAINT demo_name PRIMARY KEY (id))' 29 | # --- 30 | -------------------------------------------------------------------------------- /tests/custom_tables/test_compile_hybrid_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | 5 | from sqlalchemy import Column, Integer, MetaData, String 6 | from sqlalchemy.orm import declarative_base 7 | from sqlalchemy.sql.ddl import CreateTable 8 | 9 | from snowflake.sqlalchemy import ARRAY, GEOMETRY, HybridTable 10 | 11 | 12 | def test_compile_hybrid_table(sql_compiler, snapshot): 13 | metadata = MetaData() 14 | table_name = "test_hybrid_table" 15 | test_geometry = HybridTable( 16 | table_name, 17 | metadata, 18 | Column("id", Integer, primary_key=True), 19 | Column("name", String), 20 | Column("geom", GEOMETRY), 21 | ) 22 | 23 | value = CreateTable(test_geometry) 24 | 25 | actual = sql_compiler(value) 26 | 27 | assert actual == snapshot 28 | 29 | 30 | def test_compile_hybrid_table_with_array(sql_compiler, snapshot): 31 | metadata = MetaData() 32 | table_name = "test_hybrid_table" 33 | test_geometry = HybridTable( 34 | table_name, 35 | metadata, 36 | Column("id", Integer, primary_key=True), 37 | Column("name", String), 38 | Column("geom", GEOMETRY), 39 | Column("array", ARRAY), 40 | ) 41 | 42 | value = CreateTable(test_geometry) 43 | 44 | actual = sql_compiler(value) 45 | 46 | assert actual == snapshot 47 | 48 | 49 | def test_compile_hybrid_table_orm(sql_compiler, snapshot): 50 | Base = declarative_base() 51 | 52 | class TestHybridTableOrm(Base): 53 | __tablename__ = "test_hybrid_table_orm" 54 | 55 | @classmethod 56 | def __table_cls__(cls, name, metadata, *arg, **kw): 57 | return HybridTable(name, metadata, *arg, **kw) 58 | 59 | id = Column(Integer, primary_key=True) 60 | name = Column(String) 61 | 62 | def __repr__(self): 63 | return f"" 64 | 65 | value = CreateTable(TestHybridTableOrm.__table__) 66 | 67 | actual = sql_compiler(value) 68 | 69 | assert actual == snapshot 70 | -------------------------------------------------------------------------------- /tests/custom_tables/test_compile_iceberg_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, String 6 | from sqlalchemy.exc import ArgumentError 7 | from sqlalchemy.orm import declarative_base 8 | from sqlalchemy.sql.ddl import CreateTable 9 | 10 | from snowflake.sqlalchemy import IcebergTable 11 | from snowflake.sqlalchemy.sql.custom_schema.options import ( 12 | IdentifierOption, 13 | LiteralOption, 14 | ) 15 | 16 | 17 | def test_compile_iceberg_table(sql_compiler, snapshot): 18 | metadata = MetaData() 19 | table_name = "test_iceberg_table" 20 | test_table = IcebergTable( 21 | table_name, 22 | metadata, 23 | Column("id", Integer), 24 | Column("geom", String), 25 | external_volume="my_external_volume", 26 | base_location="my_iceberg_table", 27 | ) 28 | 29 | value = CreateTable(test_table) 30 | 31 | actual = sql_compiler(value) 32 | 33 | assert actual == snapshot 34 | 35 | 36 | def test_compile_iceberg_table_with_options_objects(sql_compiler, snapshot): 37 | metadata = MetaData() 38 | table_name = "test_iceberg_table_with_options" 39 | test_table = IcebergTable( 40 | table_name, 41 | metadata, 42 | Column("id", Integer), 43 | Column("geom", String), 44 | external_volume=LiteralOption("my_external_volume"), 45 | base_location=LiteralOption("my_iceberg_table"), 46 | ) 47 | 48 | value = CreateTable(test_table) 49 | 50 | actual = sql_compiler(value) 51 | 52 | assert actual == snapshot 53 | 54 | 55 | def test_compile_iceberg_table_with_one_wrong_option_types(snapshot): 56 | metadata = MetaData() 57 | table_name = "test_wrong_iceberg_table" 58 | with pytest.raises(ArgumentError) as argument_error: 59 | IcebergTable( 60 | table_name, 61 | metadata, 62 | Column("id", Integer), 63 | Column("geom", String), 64 | external_volume=IdentifierOption("my_external_volume"), 65 | base_location=LiteralOption("my_iceberg_table"), 66 | ) 67 | 68 | assert str(argument_error.value) == snapshot 69 | 70 | 71 | def test_compile_iceberg_table_with_primary_key(sql_compiler, snapshot): 72 | metadata = MetaData() 73 | table_name = "test_iceberg_table_with_options" 74 | test_table = IcebergTable( 75 | table_name, 76 | metadata, 77 | Column("id", Integer, primary_key=True), 78 | Column("geom", String), 79 | external_volume=LiteralOption("my_external_volume"), 80 | base_location=LiteralOption("my_iceberg_table"), 81 | ) 82 | 83 | value = CreateTable(test_table) 84 | 85 | actual = sql_compiler(value) 86 | 87 | assert actual == snapshot 88 | 89 | 90 | def test_compile_dynamic_table_orm_with_as_query(sql_compiler, snapshot): 91 | Base = declarative_base() 92 | 93 | class TestDynamicTableOrm(Base): 94 | __tablename__ = "test_iceberg_table_orm_2" 95 | 96 | @classmethod 97 | def __table_cls__(cls, name, metadata, *arg, **kw): 98 | return IcebergTable(name, metadata, *arg, **kw) 99 | 100 | __table_args__ = { 101 | "external_volume": "my_external_volume", 102 | "base_location": "my_iceberg_table", 103 | "as_query": "SELECT * FROM table", 104 | } 105 | 106 | id = Column(Integer, primary_key=True) 107 | name = Column(String) 108 | 109 | def __repr__(self): 110 | return f"" 111 | 112 | value = CreateTable(TestDynamicTableOrm.__table__) 113 | 114 | actual = sql_compiler(value) 115 | 116 | assert actual == snapshot 117 | -------------------------------------------------------------------------------- /tests/custom_tables/test_compile_snowflake_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved.
3 | # 4 | import pytest 5 | from sqlalchemy import ( 6 | Column, 7 | ForeignKeyConstraint, 8 | Integer, 9 | MetaData, 10 | String, 11 | select, 12 | text, 13 | ) 14 | from sqlalchemy.exc import ArgumentError 15 | from sqlalchemy.orm import declarative_base 16 | from sqlalchemy.sql.ddl import CreateTable 17 | 18 | from snowflake.sqlalchemy import SnowflakeTable 19 | from snowflake.sqlalchemy.sql.custom_schema.options import ( 20 | AsQueryOption, 21 | ClusterByOption, 22 | ) 23 | 24 | 25 | def test_compile_snowflake_table(sql_compiler, snapshot): 26 | metadata = MetaData() 27 | table_name = "test_table_1" 28 | test_geometry = SnowflakeTable( 29 | table_name, 30 | metadata, 31 | Column("id", Integer), 32 | Column("geom", String), 33 | cluster_by=["id", text("id > 100")], 34 | as_query="SELECT * FROM table", 35 | ) 36 | 37 | value = CreateTable(test_geometry) 38 | 39 | actual = sql_compiler(value) 40 | 41 | assert actual == snapshot 42 | 43 | 44 | def test_compile_snowflake_table_with_explicit_options(sql_compiler, snapshot): 45 | metadata = MetaData() 46 | table_name = "test_table_2" 47 | test_geometry = SnowflakeTable( 48 | table_name, 49 | metadata, 50 | Column("id", Integer), 51 | Column("geom", String), 52 | cluster_by=ClusterByOption("id", text("id > 100")), 53 | as_query=AsQueryOption("SELECT * FROM table"), 54 | ) 55 | 56 | value = CreateTable(test_geometry) 57 | 58 | actual = sql_compiler(value) 59 | 60 | assert actual == snapshot 61 | 62 | 63 | def test_compile_snowflake_table_with_wrong_option_types(snapshot): 64 | metadata = MetaData() 65 | table_name = "test_snowflake_table" 66 | with pytest.raises(ArgumentError) as argument_error: 67 | SnowflakeTable( 68 | table_name, 69 | metadata, 70 | Column("id", Integer), 71 | Column("geom", String), 72 | as_query=ClusterByOption("id", text("id > 100")), 73 | cluster_by=AsQueryOption("SELECT * FROM table"), 74 | ) 75 | 76 | assert str(argument_error.value) == snapshot 77 | 78 | 79 | def test_compile_snowflake_table_with_primary_key(sql_compiler, snapshot): 80 | metadata = MetaData() 81 | table_name = "test_table_2" 82 | test_geometry = SnowflakeTable( 83 | table_name, 84 | metadata, 85 | Column("id", Integer, primary_key=True), 86 | Column("geom", String), 87 | cluster_by=ClusterByOption("id", text("id > 100")), 88 | as_query=AsQueryOption("SELECT * FROM table"), 89 | ) 90 | 91 | value = CreateTable(test_geometry) 92 | 93 | actual = sql_compiler(value) 94 | 95 | assert actual == snapshot 96 | 97 | 98 | def test_compile_snowflake_table_with_foreign_key(sql_compiler, snapshot): 99 | metadata = MetaData() 100 | 101 | SnowflakeTable( 102 | "table", 103 | metadata, 104 | Column("id", Integer, primary_key=True), 105 | Column("geom", String), 106 | ForeignKeyConstraint(["id"], ["table.id"]), 107 | cluster_by=ClusterByOption("id", text("id > 100")), 108 | as_query=AsQueryOption("SELECT * FROM table"), 109 | ) 110 | 111 | table_name = "test_table_2" 112 | test_geometry = SnowflakeTable( 113 | table_name, 114 | metadata, 115 | Column("id", Integer, primary_key=True), 116 | Column("geom", String), 117 | ForeignKeyConstraint(["id"], ["table.id"]), 118 | cluster_by=ClusterByOption("id", text("id > 100")), 119 | as_query=AsQueryOption("SELECT * FROM table"), 120 | ) 121 | 122 | value = CreateTable(test_geometry) 123 | 124 | actual = sql_compiler(value) 125 | 126 | assert actual == snapshot 127 | 128 | 129 | def test_compile_snowflake_table_orm_with_str_keys(sql_compiler, snapshot): 130 | Base = declarative_base() 131 | 132 | class 
TestSnowflakeTableOrm(Base): 133 | __tablename__ = "test_snowflake_table_orm_2" 134 | 135 | @classmethod 136 | def __table_cls__(cls, name, metadata, *arg, **kw): 137 | return SnowflakeTable(name, metadata, *arg, **kw) 138 | 139 | __table_args__ = { 140 | "schema": "SCHEMA_DB", 141 | "cluster_by": ["id", text("id > 100")], 142 | "as_query": "SELECT * FROM table", 143 | } 144 | 145 | id = Column(Integer, primary_key=True) 146 | name = Column(String) 147 | 148 | def __repr__(self): 149 | return f"" 150 | 151 | value = CreateTable(TestSnowflakeTableOrm.__table__) 152 | 153 | actual = sql_compiler(value) 154 | 155 | assert actual == snapshot 156 | 157 | 158 | def test_compile_snowflake_table_with_selectable(sql_compiler, snapshot): 159 | Base = declarative_base() 160 | 161 | test_table_1 = SnowflakeTable( 162 | "test_table_1", 163 | Base.metadata, 164 | Column("id", Integer, primary_key=True), 165 | Column("geom", String), 166 | ForeignKeyConstraint(["id"], ["table.id"]), 167 | cluster_by=ClusterByOption("id", text("id > 100")), 168 | ) 169 | 170 | test_table_2 = SnowflakeTable( 171 | "snowflake_test_table_1", 172 | Base.metadata, 173 | as_query=select(test_table_1).where(test_table_1.c.id == 23), 174 | ) 175 | 176 | value = CreateTable(test_table_2) 177 | 178 | actual = sql_compiler(value) 179 | 180 | assert actual == snapshot 181 | -------------------------------------------------------------------------------- /tests/custom_tables/test_create_dynamic_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, String, Table, select 6 | 7 | from snowflake.sqlalchemy import DynamicTable, exc 8 | from snowflake.sqlalchemy.sql.custom_schema.options.as_query_option import AsQueryOption 9 | from snowflake.sqlalchemy.sql.custom_schema.options.identifier_option import ( 10 | IdentifierOption, 11 | ) 12 | from snowflake.sqlalchemy.sql.custom_schema.options.keywords import SnowflakeKeyword 13 | from snowflake.sqlalchemy.sql.custom_schema.options.table_option import TableOptionKey 14 | from snowflake.sqlalchemy.sql.custom_schema.options.target_lag_option import ( 15 | TargetLagOption, 16 | TimeUnit, 17 | ) 18 | 19 | 20 | def test_create_dynamic_table(engine_testaccount, db_parameters): 21 | warehouse = db_parameters.get("warehouse", "default") 22 | metadata = MetaData() 23 | test_table_1 = Table( 24 | "test_table_1", metadata, Column("id", Integer), Column("name", String) 25 | ) 26 | 27 | metadata.create_all(engine_testaccount) 28 | 29 | with engine_testaccount.connect() as conn: 30 | ins = test_table_1.insert().values(id=1, name="test") 31 | 32 | conn.execute(ins) 33 | conn.commit() 34 | 35 | dynamic_test_table_1 = DynamicTable( 36 | "dynamic_test_table_1", 37 | metadata, 38 | Column("id", Integer), 39 | Column("name", String), 40 | target_lag=(1, TimeUnit.HOURS), 41 | warehouse=warehouse, 42 | as_query="SELECT id, name from test_table_1;", 43 | refresh_mode=SnowflakeKeyword.FULL, 44 | ) 45 | 46 | metadata.create_all(engine_testaccount) 47 | 48 | try: 49 | with engine_testaccount.connect() as conn: 50 | s = select(dynamic_test_table_1) 51 | results_dynamic_table = conn.execute(s).fetchall() 52 | s = select(test_table_1) 53 | results_table = conn.execute(s).fetchall() 54 | assert results_dynamic_table == results_table 55 | 56 | finally: 57 | metadata.drop_all(engine_testaccount) 58 | 59 | 60 | def 
test_create_dynamic_table_without_dynamictable_class( 61 | engine_testaccount, db_parameters, snapshot 62 | ): 63 | warehouse = db_parameters.get("warehouse", "default") 64 | metadata = MetaData() 65 | test_table_1 = Table( 66 | "test_table_1", metadata, Column("id", Integer), Column("name", String) 67 | ) 68 | 69 | metadata.create_all(engine_testaccount) 70 | 71 | with engine_testaccount.connect() as conn: 72 | ins = test_table_1.insert().values(id=1, name="test") 73 | 74 | conn.execute(ins) 75 | conn.commit() 76 | 77 | Table( 78 | "dynamic_test_table_1", 79 | metadata, 80 | Column("id", Integer), 81 | Column("name", String), 82 | snowflake_warehouse=warehouse, 83 | snowflake_as_query="SELECT id, name from test_table_1;", 84 | prefixes=["DYNAMIC"], 85 | ) 86 | 87 | with pytest.raises(exc.UnexpectedOptionTypeError) as exc_info: 88 | metadata.create_all(engine_testaccount) 89 | assert exc_info.value == snapshot 90 | 91 | 92 | def test_create_dynamic_table_without_dynamictable_and_defined_options( 93 | engine_testaccount, db_parameters, snapshot 94 | ): 95 | warehouse = db_parameters.get("warehouse", "default") 96 | metadata = MetaData() 97 | test_table_1 = Table( 98 | "test_table_1", metadata, Column("id", Integer), Column("name", String) 99 | ) 100 | 101 | metadata.create_all(engine_testaccount) 102 | 103 | with engine_testaccount.connect() as conn: 104 | ins = test_table_1.insert().values(id=1, name="test") 105 | 106 | conn.execute(ins) 107 | conn.commit() 108 | 109 | Table( 110 | "dynamic_test_table_1", 111 | metadata, 112 | Column("id", Integer), 113 | Column("name", String), 114 | snowflake_target_lag=TargetLagOption.create((1, TimeUnit.HOURS)), 115 | snowflake_warehouse=IdentifierOption.create( 116 | TableOptionKey.WAREHOUSE, warehouse 117 | ), 118 | snowflake_as_query=AsQueryOption.create("SELECT id, name from test_table_1;"), 119 | prefixes=["DYNAMIC"], 120 | ) 121 | 122 | with pytest.raises(exc.CustomOptionsAreOnlySupportedOnSnowflakeTables) as exc_info: 123 | metadata.create_all(engine_testaccount) 124 | assert exc_info.value == snapshot 125 | -------------------------------------------------------------------------------- /tests/custom_tables/test_create_hybrid_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | import pytest 5 | import sqlalchemy.exc 6 | from sqlalchemy import Column, Index, Integer, MetaData, String, select 7 | from sqlalchemy.orm import Session, declarative_base 8 | 9 | from snowflake.sqlalchemy import HybridTable 10 | 11 | 12 | @pytest.mark.aws 13 | def test_create_hybrid_table(engine_testaccount, db_parameters, snapshot): 14 | metadata = MetaData() 15 | table_name = "test_create_hybrid_table" 16 | 17 | dynamic_test_table_1 = HybridTable( 18 | table_name, 19 | metadata, 20 | Column("id", Integer, primary_key=True), 21 | Column("name", String), 22 | ) 23 | 24 | metadata.create_all(engine_testaccount) 25 | 26 | with engine_testaccount.connect() as conn: 27 | ins = dynamic_test_table_1.insert().values(id=1, name="test") 28 | conn.execute(ins) 29 | conn.commit() 30 | 31 | try: 32 | with engine_testaccount.connect() as conn: 33 | s = select(dynamic_test_table_1) 34 | results_hybrid_table = conn.execute(s).fetchall() 35 | assert str(results_hybrid_table) == snapshot 36 | finally: 37 | metadata.drop_all(engine_testaccount) 38 | 39 | 40 | @pytest.mark.aws 41 | def test_create_hybrid_table_with_multiple_index( 42 | engine_testaccount, db_parameters, snapshot, sql_compiler 43 | ): 44 | metadata = MetaData() 45 | table_name = "test_hybrid_table_with_multiple_index" 46 | 47 | hybrid_test_table_1 = HybridTable( 48 | table_name, 49 | metadata, 50 | Column("id", Integer, primary_key=True), 51 | Column("name", String, index=True), 52 | Column("name2", String), 53 | Column("name3", String), 54 | ) 55 | 56 | metadata.create_all(engine_testaccount) 57 | 58 | index = Index("idx_col34", hybrid_test_table_1.c.name2, hybrid_test_table_1.c.name3) 59 | 60 | with pytest.raises(sqlalchemy.exc.ProgrammingError) as exc_info: 61 | index.create(engine_testaccount) 62 | try: 63 | assert exc_info.value == snapshot 64 | finally: 65 | metadata.drop_all(engine_testaccount) 66 | 67 | 68 | @pytest.mark.aws 69 | def test_create_hybrid_table_with_orm(sql_compiler, engine_testaccount): 70 | Base = declarative_base() 71 | session = Session(bind=engine_testaccount) 72 | 73 | class TestHybridTableOrm(Base): 74 | __tablename__ = "test_hybrid_table_orm" 75 | 76 | @classmethod 77 | def __table_cls__(cls, name, metadata, *arg, **kw): 78 | return HybridTable(name, metadata, *arg, **kw) 79 | 80 | id = Column(Integer, primary_key=True) 81 | name = Column(String) 82 | 83 | def __repr__(self): 84 | return f"({self.id!r}, {self.name!r})" 85 | 86 | Base.metadata.create_all(engine_testaccount) 87 | 88 | try: 89 | instance = TestHybridTableOrm(id=0, name="name_example") 90 | session.add(instance) 91 | session.commit() 92 | data = session.query(TestHybridTableOrm).all() 93 | assert str(data) == "[(0, 'name_example')]" 94 | finally: 95 | Base.metadata.drop_all(engine_testaccount) 96 | -------------------------------------------------------------------------------- /tests/custom_tables/test_create_iceberg_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, String 6 | from sqlalchemy.exc import ProgrammingError 7 | 8 | from snowflake.sqlalchemy import IcebergTable 9 | 10 | 11 | @pytest.mark.aws 12 | def test_create_iceberg_table(engine_testaccount): 13 | metadata = MetaData() 14 | external_volume_name = "exvol" 15 | create_external_volume = f""" 16 | CREATE OR REPLACE EXTERNAL VOLUME {external_volume_name} 17 | STORAGE_LOCATIONS = 18 | ( 19 | ( 20 | NAME = 'my-s3-us-west-2' 21 | STORAGE_PROVIDER = 'S3' 22 | STORAGE_BASE_URL = 's3://myexamplebucket/' 23 | STORAGE_AWS_ROLE_ARN = 'arn:aws:iam::123456789012:role/myrole' 24 | ENCRYPTION=(TYPE='AWS_SSE_KMS' KMS_KEY_ID='1234abcd-12ab-34cd-56ef-1234567890ab') 25 | ) 26 | ); 27 | """ 28 | with engine_testaccount.connect() as connection: 29 | connection.exec_driver_sql(create_external_volume) 30 | IcebergTable( 31 | "Iceberg_Table_1", 32 | metadata, 33 | Column("id", Integer, primary_key=True), 34 | Column("geom", String), 35 | external_volume=external_volume_name, 36 | base_location="my_iceberg_table", 37 | ) 38 | 39 | with pytest.raises(ProgrammingError) as argument_error: 40 | metadata.create_all(engine_testaccount) 41 | 42 | error_str = str(argument_error.value) 43 | assert ( 44 | "(snowflake.connector.errors.ProgrammingError)" 45 | in error_str[: error_str.rfind("\n")] 46 | ) 47 | -------------------------------------------------------------------------------- /tests/custom_tables/test_create_snowflake_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | from sqlalchemy import Column, Integer, MetaData, String, select, text 5 | from sqlalchemy.orm import Session, declarative_base 6 | 7 | from snowflake.sqlalchemy import SnowflakeTable 8 | 9 | 10 | def test_create_snowflake_table_with_cluster_by( 11 | engine_testaccount, db_parameters, snapshot 12 | ): 13 | metadata = MetaData() 14 | table_name = "test_create_snowflake_table" 15 | 16 | test_table_1 = SnowflakeTable( 17 | table_name, 18 | metadata, 19 | Column("id", Integer, primary_key=True), 20 | Column("name", String), 21 | cluster_by=["id", text("id > 5")], 22 | ) 23 | 24 | metadata.create_all(engine_testaccount) 25 | 26 | with engine_testaccount.connect() as conn: 27 | ins = test_table_1.insert().values(id=1, name="test") 28 | conn.execute(ins) 29 | conn.commit() 30 | 31 | try: 32 | with engine_testaccount.connect() as conn: 33 | s = select(test_table_1) 34 | results_hybrid_table = conn.execute(s).fetchall() 35 | assert str(results_hybrid_table) == snapshot 36 | finally: 37 | metadata.drop_all(engine_testaccount) 38 | 39 | 40 | def test_create_snowflake_table_with_orm(sql_compiler, engine_testaccount): 41 | Base = declarative_base() 42 | session = Session(bind=engine_testaccount) 43 | 44 | class TestHybridTableOrm(Base): 45 | __tablename__ = "test_snowflake_table_orm" 46 | 47 | @classmethod 48 | def __table_cls__(cls, name, metadata, *arg, **kw): 49 | return SnowflakeTable(name, metadata, *arg, **kw) 50 | 51 | id = Column(Integer, primary_key=True) 52 | name = Column(String) 53 | 54 | def __repr__(self): 55 | return f"({self.id!r}, {self.name!r})" 56 | 57 | Base.metadata.create_all(engine_testaccount) 58 | 59 | try: 60 | instance = TestHybridTableOrm(id=0, name="name_example") 61 | session.add(instance) 62 | session.commit() 63 | data = session.query(TestHybridTableOrm).all() 64 | assert str(data) == "[(0, 'name_example')]" 65 | finally: 66 
| Base.metadata.drop_all(engine_testaccount) 67 | -------------------------------------------------------------------------------- /tests/custom_tables/test_generic_options.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | import pytest 5 | 6 | from snowflake.sqlalchemy import ( 7 | AsQueryOption, 8 | IdentifierOption, 9 | KeywordOption, 10 | LiteralOption, 11 | SnowflakeKeyword, 12 | TableOptionKey, 13 | TargetLagOption, 14 | exc, 15 | ) 16 | from snowflake.sqlalchemy.sql.custom_schema.options.invalid_table_option import ( 17 | InvalidTableOption, 18 | ) 19 | 20 | 21 | def test_identifier_option(): 22 | identifier = IdentifierOption.create(TableOptionKey.WAREHOUSE, "xsmall") 23 | assert identifier.render_option(None) == "WAREHOUSE = xsmall" 24 | 25 | 26 | def test_literal_option(): 27 | literal = LiteralOption.create(TableOptionKey.WAREHOUSE, "xsmall") 28 | assert literal.render_option(None) == "WAREHOUSE = 'xsmall'" 29 | 30 | 31 | def test_identifier_option_without_name(snapshot): 32 | identifier = IdentifierOption("xsmall") 33 | with pytest.raises(exc.OptionKeyNotProvidedError) as exc_info: 34 | identifier.render_option(None) 35 | assert exc_info.value == snapshot 36 | 37 | 38 | def test_identifier_option_with_wrong_type(snapshot): 39 | identifier = IdentifierOption.create(TableOptionKey.WAREHOUSE, 23) 40 | with pytest.raises(exc.InvalidTableParameterTypeError) as exc_info: 41 | identifier.render_option(None) 42 | assert exc_info.value == snapshot 43 | 44 | 45 | def test_literal_option_with_wrong_type(snapshot): 46 | literal = LiteralOption.create( 47 | TableOptionKey.WAREHOUSE, SnowflakeKeyword.DOWNSTREAM 48 | ) 49 | with pytest.raises(exc.InvalidTableParameterTypeError) as exc_info: 50 | literal.render_option(None) 51 | assert exc_info.value == snapshot 52 | 53 | 54 | def test_invalid_as_query_option(snapshot): 55 | as_query = AsQueryOption.create(23) 56 | with pytest.raises(exc.InvalidTableParameterTypeError) as exc_info: 57 | as_query.render_option(None) 58 | assert exc_info.value == snapshot 59 | 60 | 61 | @pytest.mark.parametrize( 62 | "table_option", 63 | [ 64 | IdentifierOption, 65 | LiteralOption, 66 | KeywordOption, 67 | ], 68 | ) 69 | def test_generic_option_with_wrong_type(table_option): 70 | literal = table_option.create(TableOptionKey.WAREHOUSE, 0.32) 71 | assert isinstance(literal, InvalidTableOption), "Expected InvalidTableOption" 72 | 73 | 74 | @pytest.mark.parametrize( 75 | "table_option", 76 | [ 77 | TargetLagOption, 78 | AsQueryOption, 79 | ], 80 | ) 81 | def test_non_generic_option_with_wrong_type(table_option): 82 | literal = table_option.create(0.32) 83 | assert isinstance(literal, InvalidTableOption), "Expected InvalidTableOption" 84 | -------------------------------------------------------------------------------- /tests/custom_tables/test_reflect_dynamic_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | from sqlalchemy import Column, Integer, MetaData, String, Table, select 5 | 6 | from snowflake.sqlalchemy import DynamicTable 7 | from snowflake.sqlalchemy.custom_commands import NoneType 8 | 9 | 10 | def test_simple_reflection_dynamic_table_as_table(engine_testaccount, db_parameters): 11 | warehouse = db_parameters.get("warehouse", "default") 12 | metadata = MetaData() 13 | test_table_1 = Table( 14 | "test_table_1", metadata, Column("id", Integer), Column("name", String) 15 | ) 16 | 17 | metadata.create_all(engine_testaccount) 18 | 19 | with engine_testaccount.connect() as conn: 20 | ins = test_table_1.insert().values(id=1, name="test") 21 | 22 | conn.execute(ins) 23 | conn.commit() 24 | create_table_sql = f""" 25 | CREATE DYNAMIC TABLE dynamic_test_table (id INT, name VARCHAR) 26 | TARGET_LAG = '20 minutes' 27 | WAREHOUSE = {warehouse} 28 | AS SELECT id, name from test_table_1; 29 | """ 30 | with engine_testaccount.connect() as connection: 31 | connection.exec_driver_sql(create_table_sql) 32 | 33 | dynamic_test_table = Table( 34 | "dynamic_test_table", metadata, autoload_with=engine_testaccount 35 | ) 36 | 37 | try: 38 | with engine_testaccount.connect() as conn: 39 | s = select(dynamic_test_table) 40 | results_dynamic_table = conn.execute(s).fetchall() 41 | s = select(test_table_1) 42 | results_table = conn.execute(s).fetchall() 43 | assert results_dynamic_table == results_table 44 | 45 | finally: 46 | metadata.drop_all(engine_testaccount) 47 | 48 | 49 | def test_simple_reflection_without_options_loading(engine_testaccount, db_parameters): 50 | warehouse = db_parameters.get("warehouse", "default") 51 | metadata = MetaData() 52 | test_table_1 = Table( 53 | "test_table_1", metadata, Column("id", Integer), Column("name", String) 54 | ) 55 | 56 | metadata.create_all(engine_testaccount) 57 | 58 | with engine_testaccount.connect() as conn: 59 | ins = test_table_1.insert().values(id=1, name="test") 60 | 61 | conn.execute(ins) 62 | conn.commit() 63 | create_table_sql = f""" 64 | CREATE DYNAMIC TABLE dynamic_test_table (id INT, name VARCHAR) 65 | TARGET_LAG = '20 minutes' 66 | WAREHOUSE = {warehouse} 67 | AS SELECT id, name from test_table_1; 68 | """ 69 | with engine_testaccount.connect() as connection: 70 | connection.exec_driver_sql(create_table_sql) 71 | 72 | dynamic_test_table = DynamicTable( 73 | "dynamic_test_table", metadata, autoload_with=engine_testaccount 74 | ) 75 | 76 | # TODO: Add support for loading options when table is reflected 77 | assert isinstance(dynamic_test_table.warehouse, NoneType) 78 | 79 | try: 80 | with engine_testaccount.connect() as conn: 81 | s = select(dynamic_test_table) 82 | results_dynamic_table = conn.execute(s).fetchall() 83 | s = select(test_table_1) 84 | results_table = conn.execute(s).fetchall() 85 | assert results_dynamic_table == results_table 86 | 87 | finally: 88 | metadata.drop_all(engine_testaccount) 89 | -------------------------------------------------------------------------------- /tests/custom_tables/test_reflect_hybrid_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | import pytest 5 | from sqlalchemy import MetaData, Table 6 | from sqlalchemy.sql.ddl import CreateTable 7 | 8 | 9 | @pytest.mark.aws 10 | def test_simple_reflection_hybrid_table_as_table( 11 | engine_testaccount, db_parameters, sql_compiler, snapshot 12 | ): 13 | metadata = MetaData() 14 | table_name = "test_hybrid_table_reflection" 15 | 16 | create_table_sql = f""" 17 | CREATE HYBRID TABLE {table_name} (id INT primary key, name VARCHAR, INDEX index_name (name)); 18 | """ 19 | 20 | with engine_testaccount.connect() as connection: 21 | connection.exec_driver_sql(create_table_sql) 22 | 23 | hybrid_test_table = Table(table_name, metadata, autoload_with=engine_testaccount) 24 | 25 | constraint = hybrid_test_table.constraints.pop() 26 | constraint.name = "demo_name" 27 | hybrid_test_table.constraints.add(constraint) 28 | 29 | try: 30 | with engine_testaccount.connect(): 31 | value = CreateTable(hybrid_test_table) 32 | 33 | actual = sql_compiler(value) 34 | 35 | # Prefixes reflection not supported, example: "HYBRID, DYNAMIC" 36 | assert actual == snapshot 37 | 38 | finally: 39 | metadata.drop_all(engine_testaccount) 40 | 41 | 42 | @pytest.mark.aws 43 | def test_reflect_hybrid_table_with_index( 44 | engine_testaccount, db_parameters, sql_compiler 45 | ): 46 | metadata = MetaData() 47 | schema = db_parameters["schema"] 48 | 49 | table_name = "test_hybrid_table_2" 50 | index_name = "INDEX_NAME_2" 51 | 52 | create_table_sql = f""" 53 | CREATE HYBRID TABLE {table_name} (id INT primary key, name VARCHAR, INDEX {index_name} (name)); 54 | """ 55 | 56 | with engine_testaccount.connect() as connection: 57 | connection.exec_driver_sql(create_table_sql) 58 | 59 | table = Table(table_name, metadata, schema=schema, autoload_with=engine_testaccount) 60 | 61 | try: 62 | assert len(table.indexes) == 1 and table.indexes.pop().name == index_name 63 | 64 | finally: 65 | metadata.drop_all(engine_testaccount) 66 | -------------------------------------------------------------------------------- /tests/custom_tables/test_reflect_snowflake_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | from sqlalchemy import MetaData, Table, inspect 5 | from sqlalchemy.sql.ddl import CreateTable 6 | 7 | from snowflake.sqlalchemy import SnowflakeTable 8 | 9 | 10 | def test_simple_reflection_of_table_as_sqlalchemy_table( 11 | engine_testaccount, db_parameters, sql_compiler, snapshot 12 | ): 13 | metadata = MetaData() 14 | table_name = "test_snowflake_table_reflection" 15 | 16 | create_table_sql = f""" 17 | CREATE TABLE {table_name} (id INT primary key, name VARCHAR); 18 | """ 19 | 20 | with engine_testaccount.connect() as connection: 21 | connection.exec_driver_sql(create_table_sql) 22 | 23 | snowflake_test_table = Table(table_name, metadata, autoload_with=engine_testaccount) 24 | constraint = snowflake_test_table.constraints.pop() 25 | constraint.name = "demo_name" 26 | snowflake_test_table.constraints.add(constraint) 27 | 28 | try: 29 | with engine_testaccount.connect(): 30 | value = CreateTable(snowflake_test_table) 31 | 32 | actual = sql_compiler(value) 33 | 34 | assert actual == snapshot 35 | 36 | finally: 37 | metadata.drop_all(engine_testaccount) 38 | 39 | 40 | def test_simple_reflection_of_table_as_snowflake_table( 41 | engine_testaccount, db_parameters, sql_compiler, snapshot 42 | ): 43 | metadata = MetaData() 44 | table_name = "test_snowflake_table_reflection" 45 | 46 | create_table_sql = f""" 47 | CREATE TABLE {table_name} (id INT primary key, name VARCHAR); 48 | """ 49 | 50 | with engine_testaccount.connect() as connection: 51 | connection.exec_driver_sql(create_table_sql) 52 | 53 | snowflake_test_table = SnowflakeTable( 54 | table_name, metadata, autoload_with=engine_testaccount 55 | ) 56 | constraint = snowflake_test_table.constraints.pop() 57 | constraint.name = "demo_name" 58 | snowflake_test_table.constraints.add(constraint) 59 | 60 | try: 61 | with engine_testaccount.connect(): 62 | value = CreateTable(snowflake_test_table) 63 | 64 | actual = sql_compiler(value) 65 | 66 | assert actual == snapshot 67 | 68 | finally: 69 | metadata.drop_all(engine_testaccount) 70 | 71 | 72 | def test_inspect_snowflake_table( 73 | engine_testaccount, db_parameters, sql_compiler, snapshot 74 | ): 75 | metadata = MetaData() 76 | table_name = "test_snowflake_table_inspect" 77 | 78 | create_table_sql = f""" 79 | CREATE TABLE {table_name} (id INT primary key, name VARCHAR); 80 | """ 81 | 82 | with engine_testaccount.connect() as connection: 83 | connection.exec_driver_sql(create_table_sql) 84 | 85 | try: 86 | with engine_testaccount.connect() as conn: 87 | insp = inspect(conn) 88 | table = insp.get_columns(table_name) 89 | assert table == snapshot 90 | 91 | finally: 92 | metadata.drop_all(engine_testaccount) 93 | -------------------------------------------------------------------------------- /tests/data/users.txt: -------------------------------------------------------------------------------- 1 | 1,name1,fullname1 2 | 2,name2,fullname2 3 | 3,name3,fullname3 4 | 4,name4,fullname4 5 | 5,name5,fullname5 6 | 6,name6,fullname6 7 | 7,name7,fullname7 8 | 7,name8,fullname8 9 | -------------------------------------------------------------------------------- /tests/sqlalchemy_test_suite/README.md: -------------------------------------------------------------------------------- 1 | # SQLAlchemy Compliance Tests 2 | 3 | SQLAlchemy provides a standard test suite for verifying that dialects work properly. This directory applies these tests 4 | to the Snowflake SQLAlchemy dialect. 5 | 6 | **Please be aware that the test suites are not collected in pytest by default** -- the directory is ignored in `tox.ini`.
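For illustration only (the exact `tox.ini` contents are not reproduced in this document, so the snippet below is a sketch of how such an exclusion is commonly expressed with pytest's standard `--ignore` option, not a copy of this repository's configuration):

```bash
# Sketch: a default test run that skips the compliance suite,
# which is what the tox.ini exclusion achieves.
pytest --ignore=tests/sqlalchemy_test_suite tests
# The suite runs only when its directory is passed explicitly (see the command below).
```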
7 | There are three main issues with the SQLAlchemy test suites: 8 | 1. Importing the SQLAlchemy pytest plugin results in Snowflake SQLAlchemy dialect-specific tests not 9 | being collected. 10 | 2. Putting test_suite.py and the related conftest config under the root tests dir makes pytest fail to collect cfg 11 | information such as requirements, and the customized session db config does not work as expected (schema not set). 12 | 13 | Running the SQLAlchemy test suites from a separate directory avoids the aforementioned issues. Thus, we skip the 14 | path in the config and run the test suites separately. 15 | 16 | To run the SQLAlchemy test suites, specify the test suite directory when running the pytest command: 17 | 18 | ```bash 19 | $ cd snowflake-sqlalchemy 20 | $ pytest tests/sqlalchemy_test_suite 21 | ``` 22 | -------------------------------------------------------------------------------- /tests/sqlalchemy_test_suite/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | -------------------------------------------------------------------------------- /tests/sqlalchemy_test_suite/conftest.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import sqlalchemy.testing.config 6 | from sqlalchemy import util 7 | from sqlalchemy.dialects import registry 8 | from sqlalchemy.testing.plugin.pytestplugin import * # noqa 9 | from sqlalchemy.testing.plugin.pytestplugin import ( 10 | pytest_sessionfinish as _pytest_sessionfinish, 11 | ) 12 | from sqlalchemy.testing.plugin.pytestplugin import ( 13 | pytest_sessionstart as _pytest_sessionstart, 14 | ) 15 | 16 | import snowflake.connector 17 | from snowflake.sqlalchemy import URL 18 | from snowflake.sqlalchemy.compat import IS_VERSION_20 19 | 20 | from ..conftest import get_db_parameters 21 | from ..util import random_string 22 | 23 | registry.register("snowflake", "snowflake.sqlalchemy", "dialect") 24 | registry.register("snowflake.snowflake", "snowflake.sqlalchemy", "dialect") 25 | TEST_SCHEMA = f"test_schema_{random_string(5)}" 26 | TEST_SCHEMA_2 = f"{TEST_SCHEMA}_2" 27 | 28 | 29 | if IS_VERSION_20: 30 | collect_ignore_glob = ["test_suite.py"] 31 | else: 32 | collect_ignore_glob = ["test_suite_20.py"] 33 | 34 | 35 | # patch sqlalchemy.testing.config.Config.__init__ for schema name randomization; 36 | # reusing the same schema name would cause conflicts because we run tests in parallel in the CI 37 | def config_patched__init__(self, db, db_opts, options, file_config): 38 | self._set_name(db) 39 | self.db = db 40 | self.db_opts = db_opts 41 | self.options = options 42 | self.file_config = file_config 43 | self.test_schema = TEST_SCHEMA 44 | self.test_schema_2 = TEST_SCHEMA_2 45 | 46 | self.is_async = db.dialect.is_async and not util.asbool( 47 | db.url.query.get("async_fallback", False) 48 | ) 49 | 50 | 51 | sqlalchemy.testing.config.Config.__init__ = config_patched__init__ 52 | 53 | 54 | def pytest_sessionstart(session): 55 | db_parameters = get_db_parameters() 56 | session.config.option.dburi = [URL(**db_parameters)] 57 | # a schema name containing 'TEST_SCHEMA' is required by some tests of the sqlalchemy test suite 58 | with snowflake.connector.connect(**db_parameters) as con: 59 | con.cursor().execute(f"CREATE SCHEMA IF NOT EXISTS {db_parameters['schema']}") 60 | con.cursor().execute(f"CREATE
SCHEMA IF NOT EXISTS {TEST_SCHEMA};") 61 | _pytest_sessionstart(session) 62 | 63 | 64 | def pytest_sessionfinish(session): 65 | db_parameters = get_db_parameters() 66 | with snowflake.connector.connect(**db_parameters) as con: 67 | con.cursor().execute(f"DROP SCHEMA IF EXISTS {db_parameters['schema']}") 68 | con.cursor().execute(f"DROP SCHEMA IF EXISTS {TEST_SCHEMA}") 69 | _pytest_sessionfinish(session) 70 | -------------------------------------------------------------------------------- /tests/test_create.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from snowflake.sqlalchemy import ( 6 | AzureContainer, 7 | CreateFileFormat, 8 | CreateStage, 9 | CSVFormatter, 10 | ExternalStage, 11 | PARQUETFormatter, 12 | ) 13 | 14 | 15 | def test_create_stage(sql_compiler): 16 | """ 17 | This test compiles the SQL to create a named stage by defining the stage naming 18 | information (namespace and name) and the physical storage information (here: an 19 | Azure container), and combining them in a CreateStage object. 20 | NB: The test only validates that the correct SQL is generated. It does not 21 | execute the SQL (yet) against an actual Snowflake instance. 22 | """ 23 | # define the stage name 24 | stage = ExternalStage( 25 | name="AZURE_STAGE", 26 | namespace="MY_DB.MY_SCHEMA", 27 | ) 28 | # define the storage container 29 | container = AzureContainer( 30 | account="myaccount", container="my-container" 31 | ).credentials("saas_token") 32 | # define the stage object 33 | create_stage = CreateStage(stage=stage, container=container) 34 | 35 | # validate that the resulting SQL is as expected 36 | actual = sql_compiler(create_stage) 37 | expected = ( 38 | "CREATE STAGE MY_DB.MY_SCHEMA.AZURE_STAGE " 39 | "URL='azure://myaccount.blob.core.windows.net/my-container' " 40 | "CREDENTIALS=(AZURE_SAS_TOKEN='saas_token')" 41 | ) 42 | assert actual == expected 43 | 44 | create_stage_replace = CreateStage( 45 | stage=stage, container=container, replace_if_exists=True 46 | ) 47 | 48 | # validate that the resulting SQL is as expected 49 | actual = sql_compiler(create_stage_replace) 50 | expected = ( 51 | "CREATE OR REPLACE STAGE MY_DB.MY_SCHEMA.AZURE_STAGE " 52 | "URL='azure://myaccount.blob.core.windows.net/my-container' " 53 | "CREDENTIALS=(AZURE_SAS_TOKEN='saas_token')" 54 | ) 55 | assert actual == expected 56 | 57 | create_stage = CreateStage(stage=stage, container=container, temporary=True) 58 | # validate that the resulting SQL is as expected 59 | actual = sql_compiler(create_stage) 60 | expected = ( 61 | "CREATE TEMPORARY STAGE MY_DB.MY_SCHEMA.AZURE_STAGE " 62 | "URL='azure://myaccount.blob.core.windows.net/my-container' " 63 | "CREDENTIALS=(AZURE_SAS_TOKEN='saas_token')" 64 | ) 65 | assert actual == expected 66 | 67 | 68 | def test_create_csv_format(sql_compiler): 69 | """ 70 | This test compiles the SQL to create a named CSV format. The format is defined 71 | using a name and a formatter object with the detailed formatting information. 72 | TODO: split name parameters into namespace and actual name 73 | 74 | NB: The test only validates that the correct SQL is generated. It does not 75 | execute the SQL (yet) against an actual Snowflake instance.
76 | """ 77 | create_format = CreateFileFormat( 78 | format_name="ML_POC.PUBLIC.CSV_FILE_FORMAT", 79 | formatter=CSVFormatter().field_delimiter(","), 80 | ) 81 | actual = sql_compiler(create_format) 82 | expected = ( 83 | "CREATE FILE FORMAT ML_POC.PUBLIC.CSV_FILE_FORMAT " 84 | "TYPE='csv' FIELD_DELIMITER = ','" 85 | ) 86 | assert actual == expected 87 | 88 | create_format_replace = CreateFileFormat( 89 | format_name="ML_POC.PUBLIC.CSV_FILE_FORMAT", 90 | formatter=CSVFormatter().field_delimiter(","), 91 | replace_if_exists=True, 92 | ) 93 | actual = sql_compiler(create_format_replace) 94 | expected = ( 95 | "CREATE OR REPLACE FILE FORMAT ML_POC.PUBLIC.CSV_FILE_FORMAT " 96 | "TYPE='csv' FIELD_DELIMITER = ','" 97 | ) 98 | assert actual == expected 99 | 100 | 101 | def test_create_parquet_format(sql_compiler): 102 | """ 103 | This test compiles the SQL to create a named Parquet format. The format is defined 104 | using a name and a formatter object with the detailed formatting information. 105 | TODO: split name parameters into namespace and actual name 106 | 107 | NB: The test only validates that the correct SQL is generated. It does not 108 | execute the SQL (yet) against an actual Snowflake instance. 109 | 110 | """ 111 | create_format = CreateFileFormat( 112 | format_name="ML_POC.PUBLIC.CSV_FILE_FORMAT", 113 | formatter=PARQUETFormatter().compression("AUTO"), 114 | ) 115 | actual = sql_compiler(create_format) 116 | expected = ( 117 | "CREATE FILE FORMAT ML_POC.PUBLIC.CSV_FILE_FORMAT " 118 | "TYPE='parquet' COMPRESSION = 'AUTO'" 119 | ) 120 | assert actual == expected 121 | 122 | create_format_replace = CreateFileFormat( 123 | format_name="ML_POC.PUBLIC.CSV_FILE_FORMAT", 124 | formatter=PARQUETFormatter().compression("AUTO"), 125 | replace_if_exists=True, 126 | ) 127 | actual = sql_compiler(create_format_replace) 128 | expected = ( 129 | "CREATE OR REPLACE FILE FORMAT ML_POC.PUBLIC.CSV_FILE_FORMAT " 130 | "TYPE='parquet' COMPRESSION = 'AUTO'" 131 | ) 132 | assert actual == expected 133 | -------------------------------------------------------------------------------- /tests/test_custom_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | 4 | import pytest 5 | from sqlalchemy import func 6 | 7 | from snowflake.sqlalchemy import snowdialect 8 | 9 | 10 | def test_flatten_does_not_render_params(): 11 | """This behavior is for backward compatibility. 12 | 13 | In previous versions params were not rendered. 14 | In the future this behavior will change. 15 | """ 16 | flat = func.flatten("[1, 2]", outer=True) 17 | res = flat.compile(dialect=snowdialect.dialect()) 18 | 19 | assert str(res) == "flatten(%(flatten_1)s)" 20 | 21 | 22 | def test_flatten_emits_warning(): 23 | expected_warning = "For backward compatibility params are not rendered." 24 | with pytest.warns(DeprecationWarning, match=expected_warning): 25 | func.flatten().compile(dialect=snowdialect.dialect()) 26 | -------------------------------------------------------------------------------- /tests/test_custom_types.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved.
3 | # 4 | 5 | import pytest 6 | from sqlalchemy import Column, Integer, MetaData, Table, text 7 | 8 | from snowflake.sqlalchemy import TEXT, custom_types 9 | 10 | 11 | def test_string_conversions(): 12 | """Makes sure that all of the Snowflake SQLAlchemy types can be turned into Strings""" 13 | sf_custom_types = [ 14 | "VARIANT", 15 | "OBJECT", 16 | "ARRAY", 17 | "TIMESTAMP_TZ", 18 | "TIMESTAMP_LTZ", 19 | "TIMESTAMP_NTZ", 20 | "GEOGRAPHY", 21 | "GEOMETRY", 22 | ] 23 | sf_types = [ 24 | "TEXT", 25 | "CHARACTER", 26 | "DEC", 27 | "DOUBLE", 28 | "FIXED", 29 | "NUMBER", 30 | "BYTEINT", 31 | "STRING", 32 | "TINYINT", 33 | "VARBINARY", 34 | ] + sf_custom_types 35 | 36 | for type_ in sf_types: 37 | sample = getattr(custom_types, type_)() 38 | if type_ in sf_custom_types: 39 | assert type_ == str(sample) 40 | 41 | 42 | @pytest.mark.feature_max_lob_size 43 | def test_create_table_with_text_type(engine_testaccount): 44 | metadata = MetaData() 45 | table_name = "test_max_lob_size_0" 46 | test_max_lob_size = Table( 47 | table_name, 48 | metadata, 49 | Column("id", Integer, primary_key=True), 50 | Column("full_name", TEXT(), server_default=text("id::varchar")), 51 | ) 52 | 53 | metadata.create_all(engine_testaccount) 54 | try: 55 | assert test_max_lob_size is not None 56 | 57 | with engine_testaccount.connect() as conn: 58 | with conn.begin(): 59 | query = text(f"SELECT GET_DDL('TABLE', '{table_name}')") 60 | result = conn.execute(query) 61 | row = str(result.mappings().fetchone()) 62 | assert ( 63 | "VARCHAR(134217728)" in row 64 | ), f"Expected VARCHAR(134217728) in {row}" 65 | 66 | finally: 67 | test_max_lob_size.drop(engine_testaccount) 68 | -------------------------------------------------------------------------------- /tests/test_geography.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | 5 | from json import loads 6 | 7 | from sqlalchemy import Column, Integer, MetaData, Table 8 | from sqlalchemy.sql import select 9 | 10 | from snowflake.sqlalchemy import GEOGRAPHY 11 | 12 | 13 | def test_create_table_geography_datatypes(engine_testaccount): 14 | """ 15 | Create table including geography data types 16 | """ 17 | 18 | metadata = MetaData() 19 | table_name = "test_geography0" 20 | test_geography = Table( 21 | table_name, 22 | metadata, 23 | Column("id", Integer, primary_key=True), 24 | Column("geo", GEOGRAPHY), 25 | ) 26 | metadata.create_all(engine_testaccount) 27 | try: 28 | assert test_geography is not None 29 | finally: 30 | test_geography.drop(engine_testaccount) 31 | 32 | 33 | def test_inspect_geography_datatypes(engine_testaccount): 34 | """ 35 | Create table including geography data types 36 | """ 37 | metadata = MetaData() 38 | table_name = "test_geography0" 39 | test_geography = Table( 40 | table_name, 41 | metadata, 42 | Column("id", Integer, primary_key=True), 43 | Column("geo1", GEOGRAPHY), 44 | Column("geo2", GEOGRAPHY), 45 | ) 46 | metadata.create_all(engine_testaccount) 47 | 48 | try: 49 | with engine_testaccount.connect() as conn: 50 | test_point = "POINT(-122.35 37.55)" 51 | test_point1 = '{"coordinates": [-122.35,37.55],"type": "Point"}' 52 | 53 | ins = test_geography.insert().values( 54 | id=1, geo1=test_point, geo2=test_point1 55 | ) 56 | 57 | with conn.begin(): 58 | results = conn.execute(ins) 59 | results.close() 60 | 61 | s = select(test_geography) 62 | results = conn.execute(s) 63 | rows = results.fetchone() 64 | results.close() 65 | assert rows[0] == 1 66 | assert rows[1] == rows[2] 67 | assert loads(rows[2]) == loads(test_point1) 68 | finally: 69 | test_geography.drop(engine_testaccount) 70 | -------------------------------------------------------------------------------- /tests/test_geometry.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | from json import loads 5 | 6 | from sqlalchemy import Column, Integer, MetaData, Table 7 | from sqlalchemy.sql import select 8 | 9 | from snowflake.sqlalchemy import GEOMETRY 10 | 11 | 12 | def test_create_table_geometry_datatypes(engine_testaccount): 13 | """ 14 | Create table including geometry data types 15 | """ 16 | metadata = MetaData() 17 | table_name = "test_geometry0" 18 | test_geometry = Table( 19 | table_name, 20 | metadata, 21 | Column("id", Integer, primary_key=True), 22 | Column("geom", GEOMETRY), 23 | ) 24 | metadata.create_all(engine_testaccount) 25 | try: 26 | assert test_geometry is not None 27 | finally: 28 | test_geometry.drop(engine_testaccount) 29 | 30 | 31 | def test_inspect_geometry_datatypes(engine_testaccount): 32 | """ 33 | Create table including geometry data types 34 | """ 35 | metadata = MetaData() 36 | table_name = "test_geometry0" 37 | test_geometry = Table( 38 | table_name, 39 | metadata, 40 | Column("id", Integer, primary_key=True), 41 | Column("geom1", GEOMETRY), 42 | Column("geom2", GEOMETRY), 43 | ) 44 | metadata.create_all(engine_testaccount) 45 | 46 | try: 47 | with engine_testaccount.connect() as conn: 48 | test_point = "POINT(-94.58473 39.08985)" 49 | test_point1 = '{"coordinates": [-94.58473, 39.08985],"type": "Point"}' 50 | 51 | ins = test_geometry.insert().values( 52 | id=1, geom1=test_point, geom2=test_point1 53 | ) 54 | 55 | with conn.begin(): 56 | results = conn.execute(ins) 57 | results.close() 58 | 59 | s = select(test_geometry) 60 | results = conn.execute(s) 61 | rows = results.fetchone() 62 | results.close() 63 | assert rows[0] == 1 64 | assert rows[1] == rows[2] 65 | assert loads(rows[2]) == loads(test_point1) 66 | finally: 67 | test_geometry.drop(engine_testaccount) 68 | -------------------------------------------------------------------------------- /tests/test_imports.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | 5 | import importlib 6 | import inspect 7 | 8 | import pytest 9 | 10 | 11 | def get_classes_from_module(module_name): 12 | """Returns a set of class names from a given module.""" 13 | try: 14 | module = importlib.import_module(module_name) 15 | members = inspect.getmembers(module) 16 | return {name for name, obj in members if inspect.isclass(obj)} 17 | 18 | except ImportError: 19 | print(f"Module '{module_name}' could not be imported.") 20 | return set() 21 | 22 | 23 | def test_types_in_snowdialect(): 24 | classes_a = get_classes_from_module( 25 | "snowflake.sqlalchemy.parser.custom_type_parser" 26 | ) 27 | classes_b = get_classes_from_module("snowflake.sqlalchemy.snowdialect") 28 | assert classes_a.issubset(classes_b), str(classes_a - classes_b) 29 | 30 | 31 | @pytest.mark.parametrize( 32 | "type_class_name", 33 | [ 34 | "BIGINT", 35 | "BINARY", 36 | "BOOLEAN", 37 | "CHAR", 38 | "DATE", 39 | "DATETIME", 40 | "DECIMAL", 41 | "FLOAT", 42 | "INTEGER", 43 | "REAL", 44 | "SMALLINT", 45 | "TIME", 46 | "TIMESTAMP", 47 | "VARCHAR", 48 | "NullType", 49 | "_CUSTOM_DECIMAL", 50 | "ARRAY", 51 | "DOUBLE", 52 | "GEOGRAPHY", 53 | "GEOMETRY", 54 | "MAP", 55 | "OBJECT", 56 | "TIMESTAMP_LTZ", 57 | "TIMESTAMP_NTZ", 58 | "TIMESTAMP_TZ", 59 | "VARIANT", 60 | ], 61 | ) 62 | def test_snowflake_data_types_instance(type_class_name): 63 | classes_b = get_classes_from_module("snowflake.sqlalchemy.snowdialect") 64 | assert type_class_name in classes_b, type_class_name 65 | -------------------------------------------------------------------------------- /tests/test_index_reflection.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | import pytest 5 | from sqlalchemy import MetaData, inspect 6 | from sqlalchemy.sql.ddl import CreateSchema, DropSchema 7 | 8 | 9 | @pytest.mark.aws 10 | def test_indexes_reflection(engine_testaccount, db_parameters, sql_compiler): 11 | metadata = MetaData() 12 | 13 | table_name = "test_hybrid_table_2" 14 | index_name = "INDEX_NAME_2" 15 | schema = db_parameters["schema"] 16 | index_columns = ["name", "name2"] 17 | 18 | create_table_sql = f""" 19 | CREATE HYBRID TABLE {table_name} ( 20 | id INT primary key, 21 | name VARCHAR, 22 | name2 VARCHAR, 23 | INDEX {index_name} ({', '.join(index_columns)}) 24 | ); 25 | """ 26 | 27 | with engine_testaccount.connect() as connection: 28 | connection.exec_driver_sql(create_table_sql) 29 | 30 | insp = inspect(engine_testaccount) 31 | 32 | try: 33 | with engine_testaccount.connect(): 34 | # Prefixes reflection not supported, example: "HYBRID, DYNAMIC" 35 | indexes = insp.get_indexes(table_name, schema) 36 | assert len(indexes) == 1 37 | assert indexes[0].get("name") == index_name 38 | assert indexes[0].get("column_names") == index_columns 39 | 40 | finally: 41 | metadata.drop_all(engine_testaccount) 42 | 43 | 44 | @pytest.mark.aws 45 | def test_simple_reflection_hybrid_table_as_table( 46 | engine_testaccount, assert_text_in_buf, db_parameters, sql_compiler, snapshot 47 | ): 48 | metadata = MetaData() 49 | table_name = "test_simple_reflection_hybrid_table_as_table" 50 | schema = db_parameters["schema"] + "_reflections" 51 | with engine_testaccount.connect() as connection: 52 | try: 53 | connection.execute(CreateSchema(schema)) 54 | 55 | create_table_sql = f""" 56 | CREATE HYBRID TABLE {schema}.{table_name} (id INT primary key, new_column VARCHAR, INDEX index_name (new_column)); 57 | """ 58 | 
connection.exec_driver_sql(create_table_sql) 59 | 60 | metadata.reflect(engine_testaccount, schema=schema) 61 | 62 | assert_text_in_buf( 63 | f"SHOW /* sqlalchemy:get_schema_tables_info */ TABLES IN SCHEMA {schema}", 64 | occurrences=1, 65 | ) 66 | 67 | finally: 68 | connection.execute(DropSchema(schema, cascade=True)) 69 | -------------------------------------------------------------------------------- /tests/test_multivalues_insert.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from sqlalchemy import Integer, Sequence, String 6 | from sqlalchemy.schema import Column, MetaData, Table 7 | from sqlalchemy.sql import select 8 | 9 | 10 | def test_insert_table(engine_testaccount): 11 | metadata = MetaData() 12 | users = Table( 13 | "users", 14 | metadata, 15 | Column("id", Integer, Sequence("user_id_seq"), primary_key=True), 16 | Column("name", String), 17 | Column("fullname", String), 18 | ) 19 | metadata.create_all(engine_testaccount) 20 | 21 | data = [ 22 | { 23 | "id": 1, 24 | "name": "testname1", 25 | "fullname": "fulltestname1", 26 | }, 27 | { 28 | "id": 2, 29 | "name": "testname2", 30 | "fullname": "fulltestname2", 31 | }, 32 | ] 33 | try: 34 | with engine_testaccount.connect() as conn: 35 | # using multivalue insert 36 | with conn.begin(): 37 | conn.execute(users.insert().values(data)) 38 | results = conn.execute(select(users).order_by("id")) 39 | row = results.fetchone() 40 | assert row._mapping["name"] == "testname1" 41 | 42 | finally: 43 | users.drop(engine_testaccount) 44 | -------------------------------------------------------------------------------- /tests/test_qmark.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import os 6 | import sys 7 | 8 | import pandas as pd 9 | import pytest 10 | from sqlalchemy import text 11 | 12 | THIS_DIR = os.path.dirname(os.path.realpath(__file__)) 13 | 14 | 15 | def test_qmark_bulk_insert(engine_testaccount_with_qmark): 16 | """ 17 | Bulk insert using qmark paramstyle 18 | """ 19 | if sys.version_info < (3, 8): 20 | pytest.skip( 21 | "On Python 3.7, this test depends on pandas features whose implementation is incompatible with SQLAlchemy 2.0, and pandas no longer supports Python 3.7." 22 | ) 23 | 24 | with engine_testaccount_with_qmark.connect() as con: 25 | with con.begin(): 26 | con.exec_driver_sql( 27 | """ 28 | create or replace table src(c1 int, c2 string) as select seq8(), 29 | randstr(100, random()) from table(generator(rowcount=>100000)) 30 | """ 31 | ) 32 | con.exec_driver_sql("create or replace table dst like src") 33 | 34 | for data in pd.read_sql_query( 35 | text("select * from src"), con, chunksize=16000 36 | ): 37 | data.to_sql( 38 | "dst", con, if_exists="append", index=False, index_label=None 39 | ) 40 | -------------------------------------------------------------------------------- /tests/test_quote.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved.
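The `engine_testaccount_with_qmark` fixture used by the bulk-insert test above is defined in `conftest.py` (not shown in this section). A plausible minimal sketch of what such an engine setup involves — the module-level `paramstyle` switch is the connector's documented mechanism, while the credentials below are placeholders:

```python
import snowflake.connector
from sqlalchemy import create_engine

from snowflake.sqlalchemy import URL

# The Snowflake connector reads this module-level setting when new
# connections are created; "qmark" switches bind parameters to ?-style.
snowflake.connector.paramstyle = "qmark"

# Placeholder credentials; the real fixture builds these from test parameters.
engine = create_engine(
    URL(account="myaccount", user="myuser", password="mypassword")
)
```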
3 | # 4 | 5 | from sqlalchemy import Column, Integer, MetaData, Sequence, String, Table, inspect 6 | 7 | 8 | def test_table_name_with_reserved_words(engine_testaccount, db_parameters): 9 | metadata = MetaData() 10 | test_table_name = "insert" 11 | insert_table = Table( 12 | test_table_name, 13 | metadata, 14 | Column("id", Integer, Sequence(f"{test_table_name}_id_seq"), primary_key=True), 15 | Column("name", String), 16 | Column("fullname", String), 17 | ) 18 | 19 | metadata.create_all(engine_testaccount) 20 | try: 21 | inspector = inspect(engine_testaccount) 22 | columns_in_insert = inspector.get_columns(test_table_name) 23 | assert len(columns_in_insert) == 3 24 | assert columns_in_insert[0]["autoincrement"] is False 25 | assert ( 26 | f"{test_table_name}_id_seq.nextval" 27 | in columns_in_insert[0]["default"].lower() 28 | ) 29 | assert columns_in_insert[0]["name"] == "id" 30 | assert columns_in_insert[0]["primary_key"] 31 | assert not columns_in_insert[0]["nullable"] 32 | 33 | columns_in_insert = inspector.get_columns( 34 | test_table_name, schema=db_parameters["schema"] 35 | ) 36 | assert len(columns_in_insert) == 3 37 | 38 | finally: 39 | insert_table.drop(engine_testaccount) 40 | 41 | 42 | 43 | def test_table_column_as_underscore(engine_testaccount): 44 | metadata = MetaData() 45 | test_table_name = "table_1745924" 46 | insert_table = Table( 47 | test_table_name, 48 | metadata, 49 | Column("ca", Integer), 50 | Column("cb", String), 51 | Column("_", String), 52 | ) 53 | metadata.create_all(engine_testaccount) 54 | try: 55 | inspector = inspect(engine_testaccount) 56 | columns_in_insert = inspector.get_columns(test_table_name) 57 | assert len(columns_in_insert) == 3 58 | assert columns_in_insert[0]["name"] == "ca" 59 | assert columns_in_insert[1]["name"] == "cb" 60 | assert columns_in_insert[2]["name"] == "_" 61 | finally: 62 | insert_table.drop(engine_testaccount) 63 | 64 | -------------------------------------------------------------------------------- /tests/test_quote_identifiers.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 |
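The reserved-word test above exercises reflection against a live account, but the quoting behavior itself can be previewed offline by compiling a statement with the Snowflake dialect. A sketch; the output shown in the comment is an expectation based on SQLAlchemy's reserved-word quoting, not an asserted snapshot:

```python
from sqlalchemy import Column, Integer, MetaData, Table, select

from snowflake.sqlalchemy import snowdialect

metadata = MetaData()
reserved = Table("insert", metadata, Column("id", Integer, primary_key=True))

# "insert" is a reserved word, so the compiler should emit it quoted,
# e.g. SELECT "insert".id FROM "insert"
print(select(reserved).compile(dialect=snowdialect.dialect()))
```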
5 | import pytest 6 | from sqlalchemy import Column, Integer, MetaData, String, Table, insert, select 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "identifier", 11 | ( 12 | pytest.param("_", id="underscore"), 13 | pytest.param(".", id="dot"), 14 | ), 15 | ) 16 | def test_insert_with_identifier_as_column_name(identifier: str, engine_testaccount): 17 | expected_identifier = f"test: {identifier}" 18 | metadata = MetaData() 19 | table = Table( 20 | "table_1745924", 21 | metadata, 22 | Column("ca", Integer), 23 | Column("cb", String), 24 | Column(identifier, String), 25 | ) 26 | 27 | try: 28 | metadata.create_all(engine_testaccount) 29 | 30 | with engine_testaccount.connect() as connection: 31 | connection.execute( 32 | insert(table).values( 33 | { 34 | "ca": 1, 35 | "cb": "test", 36 | identifier: f"test: {identifier}", 37 | } 38 | ) 39 | ) 40 | result = connection.execute(select(table)).fetchall() 41 | assert result == [(1, "test", expected_identifier)] 42 | finally: 43 | metadata.drop_all(engine_testaccount) 44 | -------------------------------------------------------------------------------- /tests/test_semi_structured_datatypes.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | import json 6 | import textwrap 7 | 8 | import pytest 9 | from sqlalchemy import Column, Integer, MetaData, Table, inspect 10 | from sqlalchemy.sql import select 11 | 12 | from snowflake.sqlalchemy import ARRAY, OBJECT, VARIANT 13 | 14 | 15 | def test_create_table_semi_structured_datatypes(engine_testaccount): 16 | """ 17 | Create table including semi-structured data types 18 | """ 19 | metadata = MetaData() 20 | table_name = "test_variant0" 21 | test_variant = Table( 22 | table_name, 23 | metadata, 24 | Column("id", Integer, primary_key=True), 25 | Column("va", VARIANT), 26 | Column("ob", OBJECT), 27 | Column("ar", ARRAY), 28 | ) 29 | metadata.create_all(engine_testaccount) 30 | try: 31 | assert test_variant is not None 32 | finally: 33 | test_variant.drop(engine_testaccount) 34 | 35 | 36 | @pytest.mark.skip( 37 | """ 38 | Semi-structured data cannot be inserted by INSERT VALUES. Instead, 39 | INSERT SELECT must be used. The fix would be for either 1) the SQLAlchemy 40 | dialect to transform the INSERT statement or 2) Snowflake to support 41 | INSERT VALUES for semi-structured data types. No ETA for this fix.
42 | """ 43 | ) 44 | def test_insert_semi_structured_datatypes(engine_testaccount): 45 | metadata = MetaData() 46 | table_name = "test_variant1" 47 | test_variant = Table( 48 | table_name, 49 | metadata, 50 | Column("id", Integer, primary_key=True), 51 | Column("va", VARIANT), 52 | Column("ob", OBJECT), 53 | Column("ar", ARRAY), 54 | ) 55 | metadata.create_all(engine_testaccount) 56 | try: 57 | ins = test_variant.insert().values(id=1, va='{"vk1":100, "vk2":200, "vk3":300}') 58 | with engine_testaccount.connect() as conn, conn.begin(): 59 | conn.execute(ins) 60 | finally: 61 | test_variant.drop(engine_testaccount) 62 | 63 | 64 | def test_inspect_semi_structured_datatypes(engine_testaccount): 65 | """ 66 | Inspects semi-structured data type columns 67 | """ 68 | table_name = "test_variant2" 69 | metadata = MetaData() 70 | test_variant = Table( 71 | table_name, 72 | metadata, 73 | Column("id", Integer, primary_key=True), 74 | Column("va", VARIANT), 75 | Column("ar", ARRAY), 76 | ) 77 | metadata.create_all(engine_testaccount) 78 | try: 79 | with engine_testaccount.connect() as conn: 80 | with conn.begin(): 81 | sql = textwrap.dedent( 82 | f""" 83 | INSERT INTO {table_name}(id, va, ar) 84 | SELECT 1, 85 | PARSE_JSON('{{"vk1":100, "vk2":200, "vk3":300}}'), 86 | PARSE_JSON('[ 87 | {{"k":1, "v":"str1"}}, 88 | {{"k":2, "v":"str2"}}, 89 | {{"k":3, "v":"str3"}}]' 90 | ) 91 | """ 92 | ) 93 | conn.exec_driver_sql(sql) 94 | inspector = inspect(engine_testaccount) 95 | columns = inspector.get_columns(table_name) 96 | assert isinstance(columns[1]["type"], VARIANT) 97 | assert isinstance(columns[2]["type"], ARRAY) 98 | 99 | s = select(test_variant) 100 | results = conn.execute(s) 101 | rows = results.fetchone() 102 | results.close() 103 | assert rows[0] == 1 104 | data = json.loads(rows[1]) 105 | assert data["vk1"] == 100 106 | assert data["vk3"] == 300 107 | assert data is not None 108 | data = json.loads(rows[2]) 109 | assert data[1]["k"] == 2 110 | finally: 111 | test_variant.drop(engine_testaccount) 112 | -------------------------------------------------------------------------------- /tests/test_sequence.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from sqlalchemy import ( 6 | Column, 7 | Identity, 8 | Integer, 9 | MetaData, 10 | Sequence, 11 | String, 12 | Table, 13 | insert, 14 | select, 15 | ) 16 | from sqlalchemy.sql import text 17 | from sqlalchemy.sql.ddl import CreateTable 18 | 19 | 20 | def test_table_with_sequence(engine_testaccount, db_parameters): 21 | """Snowflake does not guarantee generating sequence numbers without gaps. 22 | 23 | The generated numbers are not necessarily contiguous.
24 | https://docs.snowflake.com/en/user-guide/querying-sequences 25 | """ 26 | # https://github.com/snowflakedb/snowflake-sqlalchemy/issues/124 27 | test_table_name = "sequence" 28 | test_sequence_name = f"{test_table_name}_id_seq" 29 | metadata = MetaData() 30 | 31 | sequence_table = Table( 32 | test_table_name, 33 | metadata, 34 | Column( 35 | "id", Integer, Sequence(test_sequence_name, order=True), primary_key=True 36 | ), 37 | Column("data", String(39)), 38 | ) 39 | 40 | autoload_metadata = MetaData() 41 | 42 | try: 43 | metadata.create_all(engine_testaccount) 44 | 45 | with engine_testaccount.begin() as conn: 46 | conn.execute(insert(sequence_table), ({"data": "test_insert_1"})) 47 | result = conn.execute(select(sequence_table)).fetchall() 48 | assert result == [(1, "test_insert_1")], result 49 | 50 | autoload_sequence_table = Table( 51 | test_table_name, 52 | autoload_metadata, 53 | autoload_with=engine_testaccount, 54 | ) 55 | seq = Sequence(test_sequence_name, order=True) 56 | 57 | conn.execute( 58 | insert(autoload_sequence_table), 59 | ( 60 | {"data": "multi_insert_1"}, 61 | {"data": "multi_insert_2"}, 62 | ), 63 | ) 64 | conn.execute( 65 | insert(autoload_sequence_table), 66 | ({"data": "test_insert_2"},), 67 | ) 68 | 69 | nextid = conn.execute(seq) 70 | conn.execute( 71 | insert(autoload_sequence_table), 72 | ({"id": nextid, "data": "test_insert_seq"}), 73 | ) 74 | 75 | result = conn.execute(select(sequence_table)).fetchall() 76 | 77 | assert result == [ 78 | (1, "test_insert_1"), 79 | (2, "multi_insert_1"), 80 | (3, "multi_insert_2"), 81 | (4, "test_insert_2"), 82 | (5, "test_insert_seq"), 83 | ], result 84 | 85 | finally: 86 | metadata.drop_all(engine_testaccount) 87 | 88 | 89 | def test_table_with_autoincrement(engine_testaccount): 90 | """Snowflake does not guarantee generating sequence numbers without gaps. 91 | 92 | The generated numbers are not necessarily contiguous. 
93 | https://docs.snowflake.com/en/user-guide/querying-sequences 94 | """ 95 | # https://github.com/snowflakedb/snowflake-sqlalchemy/issues/124 96 | test_table_name = "sequence" 97 | metadata = MetaData() 98 | autoincrement_table = Table( 99 | test_table_name, 100 | metadata, 101 | Column("id", Integer, autoincrement=True, primary_key=True), 102 | Column("data", String(39)), 103 | ) 104 | 105 | select_stmt = select(autoincrement_table).order_by("id") 106 | 107 | try: 108 | with engine_testaccount.begin() as conn: 109 | conn.execute(text("ALTER SESSION SET NOORDER_SEQUENCE_AS_DEFAULT = FALSE")) 110 | metadata.create_all(conn) 111 | 112 | conn.execute(insert(autoincrement_table), ({"data": "test_insert_1"})) 113 | result = conn.execute(select_stmt).fetchall() 114 | assert result == [(1, "test_insert_1")] 115 | 116 | autoload_sequence_table = Table( 117 | test_table_name, MetaData(), autoload_with=engine_testaccount 118 | ) 119 | conn.execute( 120 | insert(autoload_sequence_table), 121 | [ 122 | {"data": "multi_insert_1"}, 123 | {"data": "multi_insert_2"}, 124 | ], 125 | ) 126 | conn.execute( 127 | insert(autoload_sequence_table), 128 | [{"data": "test_insert_2"}], 129 | ) 130 | result = conn.execute(select_stmt).fetchall() 131 | assert result == [ 132 | (1, "test_insert_1"), 133 | (2, "multi_insert_1"), 134 | (3, "multi_insert_2"), 135 | (4, "test_insert_2"), 136 | ], result 137 | 138 | finally: 139 | metadata.drop_all(engine_testaccount) 140 | 141 | 142 | def test_table_with_identity(sql_compiler): 143 | test_table_name = "identity" 144 | metadata = MetaData() 145 | identity_autoincrement_table = Table( 146 | test_table_name, 147 | metadata, 148 | Column( 149 | "id", Integer, Identity(start=1, increment=1, order=True), primary_key=True 150 | ), 151 | Column("identity_col_unordered", Integer, Identity(order=False)), 152 | Column("identity_col", Integer, Identity()), 153 | ) 154 | create_table = CreateTable(identity_autoincrement_table) 155 | actual = sql_compiler(create_table) 156 | expected = ( 157 | "CREATE TABLE identity (" 158 | "\tid INTEGER NOT NULL IDENTITY(1,1) ORDER, " 159 | "\tidentity_col_unordered INTEGER NOT NULL IDENTITY NOORDER, " 160 | "\tidentity_col INTEGER NOT NULL IDENTITY, " 161 | "\tPRIMARY KEY (id))" 162 | ) 163 | assert actual == expected 164 | -------------------------------------------------------------------------------- /tests/test_timestamp.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
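`test_table_with_identity` above pins the exact DDL emitted for `Identity` columns. The same compile-only technique works outside the test harness by using the dialect directly in place of the suite's `sql_compiler` fixture — a sketch:

```python
from sqlalchemy import Column, Identity, Integer, MetaData, Table
from sqlalchemy.sql.ddl import CreateTable

from snowflake.sqlalchemy import snowdialect

metadata = MetaData()
identity_table = Table(
    "identity",
    metadata,
    Column(
        "id", Integer, Identity(start=1, increment=1, order=True), primary_key=True
    ),
)
# Per the expectation asserted above, the id column renders as:
#   id INTEGER NOT NULL IDENTITY(1,1) ORDER
print(CreateTable(identity_table).compile(dialect=snowdialect.dialect()))
```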
3 | # 4 | 5 | from datetime import datetime 6 | 7 | import pytz 8 | from sqlalchemy import Column, Integer, MetaData, Table 9 | from sqlalchemy.sql import select 10 | 11 | from snowflake.sqlalchemy import TIMESTAMP_LTZ, TIMESTAMP_NTZ, TIMESTAMP_TZ 12 | 13 | PST_TZ = "America/Los_Angeles" 14 | JST_TZ = "Asia/Tokyo" 15 | 16 | 17 | def test_create_table_timestamp_datatypes(engine_testaccount): 18 | """ 19 | Create table including timestamp data types 20 | """ 21 | metadata = MetaData() 22 | table_name = "test_timestamp0" 23 | test_timestamp = Table( 24 | table_name, 25 | metadata, 26 | Column("id", Integer, primary_key=True), 27 | Column("tsntz", TIMESTAMP_NTZ), 28 | Column("tsltz", TIMESTAMP_LTZ), 29 | Column("tstz", TIMESTAMP_TZ), 30 | ) 31 | metadata.create_all(engine_testaccount) 32 | try: 33 | assert test_timestamp is not None 34 | finally: 35 | test_timestamp.drop(engine_testaccount) 36 | 37 | 38 | def test_inspect_timestamp_datatypes(engine_testaccount): 39 | """ 40 | Create table including timestamp data types 41 | """ 42 | metadata = MetaData() 43 | table_name = "test_timestamp0" 44 | test_timestamp = Table( 45 | table_name, 46 | metadata, 47 | Column("id", Integer, primary_key=True), 48 | Column("tsntz", TIMESTAMP_NTZ), 49 | Column("tsltz", TIMESTAMP_LTZ), 50 | Column("tstz", TIMESTAMP_TZ), 51 | ) 52 | metadata.create_all(engine_testaccount) 53 | try: 54 | current_utctime = datetime.utcnow() 55 | current_localtime = pytz.utc.localize(current_utctime, is_dst=False).astimezone( 56 | pytz.timezone(PST_TZ) 57 | ) 58 | current_localtime_without_tz = datetime.now() 59 | current_localtime_with_other_tz = pytz.utc.localize( 60 | current_localtime_without_tz, is_dst=False 61 | ).astimezone(pytz.timezone(JST_TZ)) 62 | 63 | ins = test_timestamp.insert().values( 64 | id=1, 65 | tsntz=current_utctime, 66 | tsltz=current_localtime, 67 | tstz=current_localtime_with_other_tz, 68 | ) 69 | with engine_testaccount.connect() as conn: 70 | with conn.begin(): 71 | results = conn.execute(ins) 72 | results.close() 73 | 74 | s = select(test_timestamp) 75 | results = conn.execute(s) 76 | rows = results.fetchone() 77 | results.close() 78 | assert rows[0] == 1 79 | assert rows[1] == current_utctime 80 | assert rows[2] == current_localtime 81 | assert rows[3] == current_localtime_with_other_tz 82 | finally: 83 | test_timestamp.drop(engine_testaccount) 84 | -------------------------------------------------------------------------------- /tests/test_transactions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
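The timestamp round trip above depends on handing the right Python values to each variant: naive `datetime` objects for `TIMESTAMP_NTZ`, tz-aware values for `TIMESTAMP_LTZ` and `TIMESTAMP_TZ`. A compile-only sketch of the column declarations, with an illustrative table name and no asserted DDL text:

```python
from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.sql.ddl import CreateTable

from snowflake.sqlalchemy import (
    TIMESTAMP_LTZ,
    TIMESTAMP_NTZ,
    TIMESTAMP_TZ,
    snowdialect,
)

metadata = MetaData()
ts_demo = Table(
    "ts_demo",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("tsntz", TIMESTAMP_NTZ),  # takes naive datetimes
    Column("tsltz", TIMESTAMP_LTZ),  # takes tz-aware datetimes
    Column("tstz", TIMESTAMP_TZ),  # takes tz-aware datetimes
)
# The three columns should render with Snowflake's TIMESTAMP_NTZ,
# TIMESTAMP_LTZ and TIMESTAMP_TZ type names.
print(CreateTable(ts_demo).compile(dialect=snowdialect.dialect()))
```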
3 | # 4 | 5 | from sqlalchemy import Column, Integer, MetaData, String, select, text 6 | 7 | from snowflake.sqlalchemy import SnowflakeTable 8 | 9 | CURRENT_TRANSACTION = text("SELECT CURRENT_TRANSACTION()") 10 | 11 | 12 | def test_connect_read_committed(engine_testaccount, assert_text_in_buf): 13 | metadata = MetaData() 14 | table_name = "test_connect_read_committed" 15 | 16 | test_table_1 = SnowflakeTable( 17 | table_name, 18 | metadata, 19 | Column("id", Integer, primary_key=True), 20 | Column("name", String), 21 | cluster_by=["id", text("id > 5")], 22 | ) 23 | 24 | metadata.create_all(engine_testaccount) 25 | try: 26 | with engine_testaccount.connect().execution_options( 27 | isolation_level="READ COMMITTED" 28 | ) as connection: 29 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 30 | assert result[0] == (None,), result 31 | ins = test_table_1.insert().values(id=1, name="test") 32 | connection.execute(ins) 33 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 34 | assert result[0] != ( 35 | None, 36 | ), "AUTOCOMMIT DISABLED, transaction should be started" 37 | 38 | with engine_testaccount.connect() as conn: 39 | s = select(test_table_1) 40 | results = conn.execute(s).fetchall() 41 | assert len(results) == 0, results # No insert committed 42 | assert_text_in_buf("ROLLBACK", occurrences=1) 43 | finally: 44 | metadata.drop_all(engine_testaccount) 45 | 46 | 47 | def test_begin_read_committed(engine_testaccount, assert_text_in_buf): 48 | metadata = MetaData() 49 | table_name = "test_begin_read_committed" 50 | 51 | test_table_1 = SnowflakeTable( 52 | table_name, 53 | metadata, 54 | Column("id", Integer, primary_key=True), 55 | Column("name", String), 56 | cluster_by=["id", text("id > 5")], 57 | ) 58 | 59 | metadata.create_all(engine_testaccount) 60 | try: 61 | with engine_testaccount.connect().execution_options( 62 | isolation_level="READ COMMITTED" 63 | ) as connection, connection.begin(): 64 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 65 | assert result[0] == (None,), result 66 | ins = test_table_1.insert().values(id=1, name="test") 67 | connection.execute(ins) 68 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 69 | assert result[0] != ( 70 | None, 71 | ), "AUTOCOMMIT DISABLED, transaction should be started" 72 | 73 | with engine_testaccount.connect() as conn: 74 | s = select(test_table_1) 75 | results = conn.execute(s).fetchall() 76 | assert len(results) == 1, results # Insert committed 77 | assert_text_in_buf("COMMIT", occurrences=2) 78 | finally: 79 | metadata.drop_all(engine_testaccount) 80 | 81 | 82 | def test_connect_autocommit(engine_testaccount, assert_text_in_buf): 83 | metadata = MetaData() 84 | table_name = "test_connect_autocommit" 85 | 86 | test_table_1 = SnowflakeTable( 87 | table_name, 88 | metadata, 89 | Column("id", Integer, primary_key=True), 90 | Column("name", String), 91 | cluster_by=["id", text("id > 5")], 92 | ) 93 | 94 | metadata.create_all(engine_testaccount) 95 | try: 96 | with engine_testaccount.connect().execution_options( 97 | isolation_level="AUTOCOMMIT" 98 | ) as connection: 99 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 100 | assert result[0] == (None,), result 101 | ins = test_table_1.insert().values(id=1, name="test") 102 | connection.execute(ins) 103 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 104 | assert result[0] == ( 105 | None, 106 | ), "Autocommit enabled, transaction should not be started" 107 | 108 | with engine_testaccount.connect() as conn: 109 | s =
select(test_table_1) 110 | results = conn.execute(s).fetchall() 111 | assert len(results) == 1, results 112 | assert_text_in_buf( 113 | "ROLLBACK using DBAPI connection.rollback(), DBAPI should ignore due to autocommit mode", 114 | occurrences=1, 115 | ) 116 | 117 | finally: 118 | metadata.drop_all(engine_testaccount) 119 | 120 | 121 | def test_begin_autocommit(engine_testaccount, assert_text_in_buf): 122 | metadata = MetaData() 123 | table_name = "test_begin_autocommit" 124 | 125 | test_table_1 = SnowflakeTable( 126 | table_name, 127 | metadata, 128 | Column("id", Integer, primary_key=True), 129 | Column("name", String), 130 | cluster_by=["id", text("id > 5")], 131 | ) 132 | 133 | metadata.create_all(engine_testaccount) 134 | try: 135 | with engine_testaccount.connect().execution_options( 136 | isolation_level="AUTOCOMMIT" 137 | ) as connection, connection.begin(): 138 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 139 | assert result[0] == (None,), result 140 | ins = test_table_1.insert().values(id=1, name="test") 141 | connection.execute(ins) 142 | result = connection.execute(CURRENT_TRANSACTION).fetchall() 143 | assert result[0] == ( 144 | None, 145 | ), "Autocommit enabled, transaction should not be started" 146 | 147 | with engine_testaccount.connect() as conn: 148 | s = select(test_table_1) 149 | results = conn.execute(s).fetchall() 150 | assert len(results) == 1, results 151 | assert_text_in_buf( 152 | "COMMIT using DBAPI connection.commit(), DBAPI should ignore due to autocommit mode", 153 | occurrences=1, 154 | ) 155 | 156 | finally: 157 | metadata.drop_all(engine_testaccount) 158 | -------------------------------------------------------------------------------- /tests/test_unit_core.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
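The four transaction tests above all follow the same recipe: pick the isolation level per connection with `execution_options`, then query `CURRENT_TRANSACTION()` to see whether a transaction is open. Distilled outside the test harness (credentials are placeholders):

```python
from sqlalchemy import create_engine, text

from snowflake.sqlalchemy import URL

# Placeholder credentials; a real engine would come from test parameters.
engine = create_engine(URL(account="myaccount", user="myuser", password="secret"))

with engine.connect().execution_options(
    isolation_level="AUTOCOMMIT"
) as connection:
    # Under AUTOCOMMIT each statement commits on its own, so this is
    # expected to stay (None,) even after DML, as the tests above assert.
    row = connection.execute(text("SELECT CURRENT_TRANSACTION()")).fetchone()
    print(row)
```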
3 | # 4 | 5 | from sqlalchemy.engine.url import URL 6 | 7 | from snowflake.sqlalchemy import base 8 | 9 | 10 | def test_create_connect_args(): 11 | sfdialect = base.dialect() 12 | 13 | test_data = [ 14 | ( 15 | # 0: full host name and no account 16 | URL.create( 17 | "snowflake", 18 | username="testuser", 19 | password="testpassword", 20 | host="testaccount.snowflakecomputing.com", 21 | query={}, 22 | ), 23 | { 24 | "autocommit": False, 25 | "host": "testaccount.snowflakecomputing.com", 26 | "password": "testpassword", 27 | "user": "testuser", 28 | }, 29 | ), 30 | ( 31 | # 1: account name only 32 | URL.create( 33 | "snowflake", 34 | username="testuser", 35 | password="testpassword", 36 | host="testaccount", 37 | query={}, 38 | ), 39 | { 40 | "autocommit": False, 41 | "host": "testaccount.snowflakecomputing.com", 42 | "password": "testpassword", 43 | "user": "testuser", 44 | "port": "443", 45 | "account": "testaccount", 46 | }, 47 | ), 48 | ( 49 | # 2: account name including region 50 | URL.create( 51 | "snowflake", 52 | username="testuser", 53 | password="testpassword", 54 | host="testaccount.eu-central-1", 55 | query={}, 56 | ), 57 | { 58 | "autocommit": False, 59 | "host": "testaccount.eu-central-1.snowflakecomputing.com", 60 | "password": "testpassword", 61 | "user": "testuser", 62 | "port": "443", 63 | "account": "testaccount", 64 | }, 65 | ), 66 | ( 67 | # 3: full host including region 68 | URL.create( 69 | "snowflake", 70 | username="testuser", 71 | password="testpassword", 72 | host="testaccount.eu-central-1.snowflakecomputing.com", 73 | query={}, 74 | ), 75 | { 76 | "autocommit": False, 77 | "host": "testaccount.eu-central-1.snowflakecomputing.com", 78 | "password": "testpassword", 79 | "user": "testuser", 80 | }, 81 | ), 82 | ( 83 | # 4: full host including region and account 84 | URL.create( 85 | "snowflake", 86 | username="testuser", 87 | password="testpassword", 88 | host="testaccount.eu-central-1.snowflakecomputing.com", 89 | query={"account": "testaccount"}, 90 | ), 91 | { 92 | "autocommit": False, 93 | "host": "testaccount.eu-central-1.snowflakecomputing.com", 94 | "password": "testpassword", 95 | "user": "testuser", 96 | "account": "testaccount", 97 | }, 98 | ), 99 | ( 100 | # 5: full host including region and account including region 101 | URL.create( 102 | "snowflake", 103 | username="testuser", 104 | password="testpassword", 105 | host="testaccount.eu-central-1.snowflakecomputing.com", 106 | query={"account": "testaccount.eu-central-1"}, 107 | ), 108 | { 109 | "autocommit": False, 110 | "host": "testaccount.eu-central-1.snowflakecomputing.com", 111 | "password": "testpassword", 112 | "user": "testuser", 113 | "account": "testaccount.eu-central-1", 114 | }, 115 | ), 116 | ( 117 | # 6: full host including region and account including region 118 | URL.create( 119 | "snowflake", 120 | username="testuser", 121 | password="testpassword", 122 | host="snowflake.reg.local", 123 | port="8082", 124 | query={"account": "testaccount"}, 125 | ), 126 | { 127 | "autocommit": False, 128 | "host": "snowflake.reg.local", 129 | "password": "testpassword", 130 | "user": "testuser", 131 | "port": 8082, 132 | "account": "testaccount", 133 | }, 134 | ), 135 | ( 136 | # 7: Global URL 137 | URL.create( 138 | "snowflake", 139 | username="testuser", 140 | password="testpassword", 141 | host="testaccount-hso894gsiuafdhsaj935.global", 142 | ), 143 | { 144 | "autocommit": False, 145 | "host": "testaccount-hso894gsiuafdhsaj935.global.snowflakecomputing.com", 146 | "password": "testpassword", 147 | 
"user": "testuser", 148 | "port": "443", 149 | "account": "testaccount", 150 | }, 151 | ), 152 | ] 153 | 154 | for idx, ts in enumerate(test_data): 155 | _, opts = sfdialect.create_connect_args(ts[0]) 156 | assert opts == ts[1], f"Failed: {idx}: {ts[0]}" 157 | 158 | 159 | def test_denormalize_quote_join(): 160 | sfdialect = base.dialect() 161 | 162 | test_data = [ 163 | (["abc", "cde"], "abc.cde"), 164 | (["abc.cde", "def"], "abc.cde.def"), 165 | (['"Abc".cde', "def"], '"Abc".cde.def'), 166 | (['"Abc".cde', '"dEf"'], '"Abc".cde."dEf"'), 167 | ] 168 | for ts in test_data: 169 | assert sfdialect._denormalize_quote_join(*ts[0]) == ts[1] 170 | -------------------------------------------------------------------------------- /tests/test_unit_cte.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | 6 | def test_cte(): 7 | from datetime import date 8 | 9 | from sqlalchemy import Column, Date, Integer, MetaData, Table, literal, select 10 | 11 | from snowflake.sqlalchemy import snowdialect 12 | 13 | metadata = MetaData() 14 | visitors = Table( 15 | "visitors", 16 | metadata, 17 | Column("product_id", Integer), 18 | Column("date1", Date), 19 | Column("count", Integer), 20 | ) 21 | product_id = 1 22 | day = date.today() 23 | count = 5 24 | with_bar = select(literal(product_id), literal(day), literal(count)).cte("bar") 25 | sel = select(with_bar) 26 | ins = visitors.insert().from_select( 27 | [visitors.c.product_id, visitors.c.date1, visitors.c.count], sel 28 | ) 29 | assert str(ins.compile(dialect=snowdialect.dialect())) == ( 30 | "INSERT INTO visitors (product_id, date1, count) WITH bar AS \n" 31 | "(SELECT %(param_1)s AS anon_1, %(param_2)s AS anon_2, %(param_3)s AS anon_3)\n" 32 | " SELECT bar.anon_1, bar.anon_2, bar.anon_3 \n" 33 | "FROM bar" 34 | ) 35 | -------------------------------------------------------------------------------- /tests/test_unit_structured_types.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
3 | # 4 | import pytest 5 | 6 | from snowflake.sqlalchemy import NUMBER 7 | from snowflake.sqlalchemy.custom_types import MAP, TEXT 8 | from src.snowflake.sqlalchemy.parser.custom_type_parser import ( 9 | parse_type, 10 | tokenize_parameters, 11 | ) 12 | 13 | 14 | def test_compile_map_with_not_null(snapshot): 15 | user_table = MAP(NUMBER(10, 0), TEXT(), not_null=True) 16 | assert user_table.compile() == snapshot 17 | 18 | 19 | def test_extract_parameters(): 20 | example = "a, b(c, d, f), d" 21 | assert tokenize_parameters(example) == ["a", "b(c, d, f)", "d"] 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "input_type, expected_type", 26 | [ 27 | ("BIGINT", "BIGINT"), 28 | ("BINARY(16)", "BINARY(16)"), 29 | ("BOOLEAN", "BOOLEAN"), 30 | ("CHAR(5)", "CHAR(5)"), 31 | ("CHARACTER(5)", "CHAR(5)"), 32 | ("DATE", "DATE"), 33 | ("DATETIME(3)", "DATETIME"), 34 | ("DECIMAL(10, 2)", "DECIMAL(10, 2)"), 35 | ("DEC(10, 2)", "DECIMAL(10, 2)"), 36 | ("DOUBLE", "FLOAT"), 37 | ("FLOAT", "FLOAT"), 38 | ("FIXED(10, 2)", "DECIMAL(10, 2)"), 39 | ("INT", "INTEGER"), 40 | ("INTEGER", "INTEGER"), 41 | ("NUMBER(12, 4)", "DECIMAL(12, 4)"), 42 | ("REAL", "REAL"), 43 | ("BYTEINT", "SMALLINT"), 44 | ("SMALLINT", "SMALLINT"), 45 | ("STRING(255)", "VARCHAR(255)"), 46 | ("TEXT(255)", "VARCHAR(255)"), 47 | ("VARCHAR(255)", "VARCHAR(255)"), 48 | ("TIME(6)", "TIME"), 49 | ("TIMESTAMP(3)", "TIMESTAMP"), 50 | ("TIMESTAMP_TZ(3)", "TIMESTAMP_TZ"), 51 | ("TIMESTAMP_LTZ(3)", "TIMESTAMP_LTZ"), 52 | ("TIMESTAMP_NTZ(3)", "TIMESTAMP_NTZ"), 53 | ("TINYINT", "SMALLINT"), 54 | ("VARBINARY(16)", "BINARY(16)"), 55 | ("VARCHAR(255)", "VARCHAR(255)"), 56 | ("VARIANT", "VARIANT"), 57 | ( 58 | "MAP(DECIMAL(10, 0), MAP(DECIMAL(10, 0), VARCHAR NOT NULL))", 59 | "MAP(DECIMAL(10, 0), MAP(DECIMAL(10, 0), VARCHAR NOT NULL))", 60 | ), 61 | ( 62 | "MAP(DECIMAL(10, 0), MAP(DECIMAL(10, 0), VARCHAR))", 63 | "MAP(DECIMAL(10, 0), MAP(DECIMAL(10, 0), VARCHAR))", 64 | ), 65 | ("MAP(DECIMAL(10, 0), VARIANT)", "MAP(DECIMAL(10, 0), VARIANT)"), 66 | ("OBJECT", "OBJECT"), 67 | ( 68 | "OBJECT(a DECIMAL(10, 0) NOT NULL, b DECIMAL(10, 0), c VARCHAR NOT NULL)", 69 | "OBJECT(a DECIMAL(10, 0) NOT NULL, b DECIMAL(10, 0), c VARCHAR NOT NULL)", 70 | ), 71 | ("ARRAY", "ARRAY"), 72 | ( 73 | "ARRAY(MAP(DECIMAL(10, 0), VARCHAR NOT NULL))", 74 | "ARRAY(MAP(DECIMAL(10, 0), VARCHAR NOT NULL))", 75 | ), 76 | ("GEOGRAPHY", "GEOGRAPHY"), 77 | ("GEOMETRY", "GEOMETRY"), 78 | ], 79 | ) 80 | def test_snowflake_data_types(input_type, expected_type): 81 | assert parse_type(input_type).compile() == expected_type 82 | -------------------------------------------------------------------------------- /tests/test_unit_types.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 
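The parametrized cases above double as documentation for `parse_type`: Snowflake's reflected type strings, including nested structured types, are parsed into dialect types whose `compile()` output is the canonical spelling. A few of the same cases, invoked directly:

```python
from src.snowflake.sqlalchemy.parser.custom_type_parser import (
    parse_type,
    tokenize_parameters,
)

# Synonyms collapse to canonical names; precision is kept where relevant.
assert parse_type("NUMBER(12, 4)").compile() == "DECIMAL(12, 4)"
# Nested structured types survive the round trip.
assert (
    parse_type("MAP(DECIMAL(10, 0), VARIANT)").compile()
    == "MAP(DECIMAL(10, 0), VARIANT)"
)
# The tokenizer splits top-level parameters without breaking nested ones.
assert tokenize_parameters("a, b(c, d, f), d") == ["a", "b(c, d, f)", "d"]
```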
3 | # 4 | 5 | import snowflake.sqlalchemy 6 | from snowflake.sqlalchemy.snowdialect import SnowflakeDialect 7 | 8 | from .util import ischema_names_baseline 9 | 10 | 11 | def test_type_synonyms(): 12 | from snowflake.sqlalchemy.snowdialect import ischema_names 13 | 14 | for k, _ in ischema_names.items(): 15 | assert getattr(snowflake.sqlalchemy, k) is not None 16 | 17 | 18 | def test_type_baseline(): 19 | assert set(SnowflakeDialect.ischema_names.keys()) == set( 20 | ischema_names_baseline.keys() 21 | ) 22 | for k, v in SnowflakeDialect.ischema_names.items(): 23 | assert issubclass(v, ischema_names_baseline[k]) 24 | -------------------------------------------------------------------------------- /tests/test_unit_url.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | import urllib.parse 5 | 6 | from snowflake.sqlalchemy import URL 7 | 8 | 9 | def test_url(): 10 | assert ( 11 | URL(account="testaccount", user="admin", password="test", warehouse="testwh") 12 | == "snowflake://admin:test@testaccount/?warehouse=testwh" 13 | ) 14 | 15 | assert ( 16 | URL(account="testaccount", user="admin", password="test") 17 | == "snowflake://admin:test@testaccount/" 18 | ) 19 | 20 | assert ( 21 | URL( 22 | account="testaccount", 23 | user="admin", 24 | password="1-pass 2-pass 3-: 4-@ 5-/ 6-pass", 25 | ) 26 | == "snowflake://admin:1-pass 2-pass 3-%3A 4-%40 5-%2F 6-pass@testaccount/" 27 | ) 28 | 29 | quoted_password = urllib.parse.quote("kx@% jj5/g") 30 | assert ( 31 | URL( 32 | account="testaccount", 33 | user="admin", 34 | password=quoted_password, 35 | ) 36 | == "snowflake://admin:kx%40%25%20jj5%2Fg@testaccount/" 37 | ) 38 | 39 | assert ( 40 | URL(account="testaccount", user="admin", password="test", database="testdb") 41 | == "snowflake://admin:test@testaccount/testdb" 42 | ) 43 | 44 | assert ( 45 | URL( 46 | account="testaccount", 47 | user="admin", 48 | password="test", 49 | database="testdb", 50 | schema="testschema", 51 | ) 52 | == "snowflake://admin:test@testaccount/testdb/testschema" 53 | ) 54 | 55 | assert ( 56 | URL( 57 | account="testaccount", 58 | user="admin", 59 | password="test", 60 | database="testdb", 61 | schema="testschema", 62 | warehouse="testwh", 63 | ) 64 | == "snowflake://admin:test@testaccount/testdb/testschema?warehouse" 65 | "=testwh" 66 | ) 67 | 68 | assert ( 69 | URL( 70 | host="snowflake.reg.local", 71 | account="testaccount", 72 | user="admin", 73 | password="test", 74 | database="testdb", 75 | schema="testschema", 76 | ) 77 | == "snowflake://admin:test@snowflake.reg.local:443/testdb" 78 | "/testschema?account=testaccount" 79 | ) 80 | 81 | assert URL( 82 | user="admin", account="testaccount", password="test", region="eu-central-1" 83 | ) == ("snowflake://admin:test@testaccount.eu-central-1/") 84 | 85 | assert URL( 86 | user="admin", 87 | account="testaccount", 88 | password="test", 89 | region="eu-central-1.azure", 90 | ) == ("snowflake://admin:test@testaccount.eu-central-1.azure/") 91 | 92 | assert URL( 93 | host="testaccount.eu-central-1.snowflakecomputing.com", 94 | user="admin", 95 | account="testaccount", 96 | password="test", 97 | ) == ( 98 | "snowflake://admin:test@testaccount.eu-central-1" 99 | ".snowflakecomputing.com:443/?account=testaccount" 100 | ) 101 | 102 | # empty password should be acceptable in URL utility. The validation will 103 | # happen in Python connector anyway. 
104 | assert URL( 105 | host="testaccount.eu-central-1.snowflakecomputing.com", 106 | user="admin", 107 | account="testaccount", 108 | ) == ( 109 | "snowflake://admin:@testaccount.eu-central-1" 110 | ".snowflakecomputing.com:443/?account=testaccount" 111 | ) 112 | 113 | # authenticator=externalbrowser doesn't require a password. 114 | assert URL( 115 | host="testaccount.eu-central-1.snowflakecomputing.com", 116 | user="admin", 117 | account="testaccount", 118 | authenticator="externalbrowser", 119 | ) == ( 120 | "snowflake://admin:@testaccount.eu-central-1" 121 | ".snowflakecomputing.com:443/?account=testaccount" 122 | "&authenticator=externalbrowser" 123 | ) 124 | 125 | # authenticator=oktaurl support 126 | assert URL( 127 | user="testuser", 128 | account="testaccount", 129 | password="test", 130 | authenticator="https://testokta.okta.com", 131 | ) == ( 132 | "snowflake://testuser:test@testaccount" 133 | "/?authenticator=https%3A%2F%2Ftestokta.okta.com" 134 | ) 135 | -------------------------------------------------------------------------------- /tests/util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. 3 | # 4 | 5 | from __future__ import annotations 6 | 7 | import random 8 | import string 9 | from typing import Sequence 10 | 11 | from sqlalchemy.types import ( 12 | BIGINT, 13 | BINARY, 14 | BOOLEAN, 15 | CHAR, 16 | DATE, 17 | DATETIME, 18 | DECIMAL, 19 | FLOAT, 20 | INTEGER, 21 | REAL, 22 | SMALLINT, 23 | TIME, 24 | TIMESTAMP, 25 | VARCHAR, 26 | ) 27 | 28 | from snowflake.sqlalchemy.custom_types import ( 29 | ARRAY, 30 | GEOGRAPHY, 31 | GEOMETRY, 32 | MAP, 33 | OBJECT, 34 | TIMESTAMP_LTZ, 35 | TIMESTAMP_NTZ, 36 | TIMESTAMP_TZ, 37 | VARIANT, 38 | ) 39 | 40 | ischema_names_baseline = { 41 | "BIGINT": BIGINT, 42 | "BINARY": BINARY, 43 | # 'BIT': BIT, 44 | "BOOLEAN": BOOLEAN, 45 | "CHAR": CHAR, 46 | "CHARACTER": CHAR, 47 | "DATE": DATE, 48 | "DATETIME": DATETIME, 49 | "DEC": DECIMAL, 50 | "DECIMAL": DECIMAL, 51 | "DOUBLE": FLOAT, 52 | "FIXED": DECIMAL, 53 | "FLOAT": FLOAT, 54 | "INT": INTEGER, 55 | "INTEGER": INTEGER, 56 | "NUMBER": DECIMAL, 57 | # 'OBJECT': ? 58 | "REAL": REAL, 59 | "BYTEINT": SMALLINT, 60 | "SMALLINT": SMALLINT, 61 | "STRING": VARCHAR, 62 | "TEXT": VARCHAR, 63 | "TIME": TIME, 64 | "TIMESTAMP": TIMESTAMP, 65 | "TIMESTAMP_TZ": TIMESTAMP_TZ, 66 | "TIMESTAMP_LTZ": TIMESTAMP_LTZ, 67 | "TIMESTAMP_NTZ": TIMESTAMP_NTZ, 68 | "TINYINT": SMALLINT, 69 | "VARBINARY": BINARY, 70 | "VARCHAR": VARCHAR, 71 | "VARIANT": VARIANT, 72 | "OBJECT": OBJECT, 73 | "ARRAY": ARRAY, 74 | "GEOGRAPHY": GEOGRAPHY, 75 | "GEOMETRY": GEOMETRY, 76 | "MAP": MAP, 77 | } 78 | 79 | 80 | def random_string( 81 | length: int, 82 | prefix: str = "", 83 | suffix: str = "", 84 | choices: Sequence[str] = string.ascii_lowercase, 85 | ) -> str: 86 | """Our convenience function to generate random string for object names. 87 | 88 | Args: 89 | length: How many random characters to choose from choices. 90 | prefix: Prefix to add to random string generated. 91 | suffix: Suffix to add to random string generated. 92 | choices: A generator of things to choose from. 
93 | """ 94 | random_part = "".join([random.choice(choices) for _ in range(length)]) 95 | return "".join([prefix, random_part, suffix]) 96 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | min_version = 4.0.0 3 | envlist = fix_lint, 4 | py{37,38,39,310,311}{,-pandas}, 5 | coverage, 6 | skip_missing_interpreters = true 7 | 8 | [testenv] 9 | package = external 10 | description = run the tests with pytest under {basepython} 11 | extras = 12 | development 13 | pandas 14 | external_wheels = 15 | py37-ci: dist/*.whl 16 | py38-ci: dist/*.whl 17 | py39-ci: dist/*.whl 18 | py310-ci: dist/*.whl 19 | py311-ci: dist/*.whl 20 | deps = pip 21 | passenv = 22 | AWS_ACCESS_KEY_ID 23 | AWS_SECRET_ACCESS_KEY 24 | SF_PROJECT_ROOT 25 | cloud_provider 26 | SF_REGRESS_LOGS 27 | ; Github Actions provided environmental variables 28 | GITHUB_ACTIONS 29 | JENKINS_HOME 30 | ; This is required on windows. Otherwise pwd module won't be imported successfully, 31 | ; see https://github.com/tox-dev/tox/issues/1455 32 | USERNAME 33 | PYTEST_ADDOPTS 34 | setenv = 35 | COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}} 36 | SQLALCHEMY_WARN_20 = 1 37 | ci: SNOWFLAKE_PYTEST_OPTS = -vvv --tb=long 38 | commands = pytest \ 39 | {env:SNOWFLAKE_PYTEST_OPTS:} \ 40 | --cov "snowflake.sqlalchemy" \ 41 | --junitxml {toxworkdir}/junit_{envname}.xml \ 42 | --ignore=tests/sqlalchemy_test_suite \ 43 | {posargs:tests} 44 | pytest {env:SNOWFLAKE_PYTEST_OPTS:} \ 45 | --cov "snowflake.sqlalchemy" --cov-append \ 46 | --junitxml {toxworkdir}/junit_{envname}.xml \ 47 | {posargs:tests/sqlalchemy_test_suite} 48 | 49 | [testenv:.pkg_external] 50 | deps = build 51 | package_glob = {toxinidir}{/}dist{/}*.whl 52 | commands = 53 | pyproject-build -w .
-o {toxinidir}{/}dist 54 | 55 | [testenv:coverage] 56 | description = [run locally after tests]: combine coverage data and create report; 57 | generates a diff coverage against origin/main (can be changed by setting DIFF_AGAINST env var) 58 | deps = {[testenv]deps} 59 | coverage 60 | ;diff_cover 61 | skip_install = True 62 | passenv = DIFF_AGAINST 63 | setenv = COVERAGE_FILE={toxworkdir}/.coverage 64 | commands = coverage combine 65 | coverage report -m 66 | coverage xml -o {toxworkdir}/coverage.xml 67 | coverage html -d {toxworkdir}/htmlcov 68 | ;diff-cover --compare-branch {env:DIFF_AGAINST:origin/main} {toxworkdir}/coverage.xml 69 | depends = py37, py38, py39, py310, py311 70 | 71 | [testenv:fix_lint] 72 | description = format the code base to adhere to our styles, and complain about what we cannot do automatically 73 | basepython = python3.8 74 | passenv = 75 | PROGRAMDATA 76 | deps = 77 | {[testenv]deps} 78 | tomlkit 79 | pre-commit >= 2.9.0 80 | skip_install = True 81 | commands = pre-commit run --all-files 82 | python -c 'import pathlib; print("hint: run \{\} install to add checks as pre-commit hook".format(pathlib.Path(r"{envdir}") / "bin" / "pre-commit"))' 83 | 84 | [pytest] 85 | addopts = -ra --ignore=tests/sqlalchemy_test_suite 86 | junit_family = legacy 87 | log_level = info 88 | markers = 89 | # Optional dependency groups markers 90 | lambda: AWS lambda tests 91 | pandas: tests for pandas integration 92 | sso: tests for sso optional dependency integration 93 | # Cloud provider markers 94 | aws: tests for Amazon Cloud storage 95 | azure: tests for Azure Cloud storage 96 | gcp: tests for Google Cloud storage 97 | # Test type markers 98 | integ: integration tests 99 | unit: unit tests 100 | skipolddriver: skip for old driver tests 101 | # Other markers 102 | timeout: tests that need a timeout time 103 | internal: tests that could but should only run on our internal CI 104 | external: tests that could but should only run on our external CI 105 | 106 | [isort] 107 | multi_line_output = 3 108 | include_trailing_comma = True 109 | force_grid_wrap = 0 110 | use_parentheses = True 111 | ensure_newline_before_comments = True 112 | line_length = 88 113 | known_first_party =snowflake,parameters,generate_test_files 114 | 115 | [flake8] 116 | # Notes on ignores: 117 | # - all ignored Ds mean doc issues, these should be cleaned up 118 | ignore = B011,C901,D100,D101,D102,D103,D104,D105,D107,D401,E203,E402,E501,F821,W503 119 | exclude= 120 | build,tool,.tox,parameters.py,parameters_jenkins.py, 121 | # Disable checking virtualenv contents 122 | *venv* 123 | max-line-length = 88 124 | show-source = true 125 | 126 | [coverage:report] 127 | skip_covered = False 128 | show_missing = True 129 | 130 | [coverage:run] 131 | branch = true 132 | parallel = true 133 | 134 | [coverage:paths] 135 | source = 136 | src/snowflake/sqlalchemy 137 | */snowflake/sqlalchemy 138 | *\snowflake\sqlalchemy 139 | */snowflake-sqlalchemy 140 | *\snowflake-sqlalchemy 141 | --------------------------------------------------------------------------------
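The `[pytest]` markers declared above gate which tests run in a given environment. A sketch of how a new test module would opt into them; the test bodies here are illustrative, not part of the suite:

```python
import pytest


@pytest.mark.unit
def test_something_offline():
    # unit-marked tests need no Snowflake account
    assert True


@pytest.mark.aws
@pytest.mark.integ
def test_something_on_aws(engine_testaccount):
    # aws/integ-marked tests only run against the Amazon Cloud storage
    # deployment, matching the marker descriptions in tox.ini above
    ...
```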