├── .clang-format ├── .git-blame-ignore-revs ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── BUG-REPORT.yml │ └── FEATURE-REQUEST.yml ├── dependabot.yml ├── pull_request_template.md ├── repo_meta.yaml └── workflows │ ├── build_test.yml │ ├── changelog.yml │ ├── cla_bot.yml │ ├── create_req_files.yml │ ├── jira_close.yml │ ├── jira_comment.yml │ ├── jira_issue.yml │ ├── parameters │ ├── private │ │ ├── jenkins_test_parameters.py.gpg │ │ ├── parameters_aws.py.gpg │ │ ├── parameters_aws_auth_tests.json.gpg │ │ ├── parameters_aws_jenkins.py.gpg │ │ ├── parameters_azure.py.gpg │ │ ├── parameters_azure_jenkins.py.gpg │ │ ├── parameters_gcp.py.gpg │ │ ├── parameters_gcp_jenkins.py.gpg │ │ └── rsa_keys │ │ │ ├── rsa_key.p8.gpg │ │ │ └── rsa_key_invalid.p8.gpg │ └── public │ │ ├── parameters_aws.py.gpg │ │ ├── parameters_azure.py.gpg │ │ └── parameters_gcp.py.gpg │ ├── port_changes_to_sp.yml │ ├── semgrep.yml │ ├── snyk-issue.yml │ ├── snyk-pr.yml │ └── stale_issue_bot.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .wiremock └── ca-cert.jks ├── CONTRIBUTING.md ├── DESCRIPTION.md ├── Jenkinsfile ├── LICENSE.txt ├── MANIFEST.in ├── NOTICE ├── README.md ├── SECURITY.md ├── benchmark └── benchmark_unit_converter.py ├── ci ├── anaconda │ ├── bld.bat │ ├── build.sh │ └── meta.yaml ├── build_darwin.sh ├── build_docker.sh ├── build_linux.sh ├── build_windows.bat ├── change_snowflake_test_pwd.py ├── container │ └── test_authentication.sh ├── docker │ ├── connector_build │ │ ├── Dockerfile │ │ └── scripts │ │ │ └── entrypoint.sh │ ├── connector_test │ │ ├── Dockerfile │ │ └── scripts │ │ │ └── entrypoint.sh │ ├── connector_test_fips │ │ ├── Dockerfile │ │ └── scripts │ │ │ └── entrypoint.sh │ └── connector_test_lambda │ │ ├── Dockerfile310 │ │ ├── Dockerfile311 │ │ ├── Dockerfile312 │ │ ├── Dockerfile313 │ │ ├── Dockerfile39 │ │ └── app.py ├── log_analyze_setup.sh ├── set_base_image.sh ├── test.sh ├── test_authentication.sh ├── test_darwin.sh ├── test_docker.sh ├── test_fips.sh ├── test_fips_docker.sh ├── test_lambda_docker.sh ├── test_linux.sh └── test_windows.bat ├── mypy.ini ├── prober ├── Dockerfile ├── Jenkinsfile.groovy ├── __init__.py ├── entrypoint.sh ├── probes │ ├── __init__.py │ ├── logging_config.py │ ├── login.py │ ├── main.py │ ├── registry.py │ └── testing_matrix.json └── setup.py ├── pyproject.toml ├── samples ├── README.md └── auth_by_key_pair_from_file.py ├── setup.cfg ├── setup.py ├── src └── snowflake │ └── connector │ ├── __init__.py │ ├── _query_context_cache.py │ ├── _sql_util.py │ ├── _utils.py │ ├── arrow_context.py │ ├── auth │ ├── __init__.py │ ├── _auth.py │ ├── _http_server.py │ ├── _oauth_base.py │ ├── by_plugin.py │ ├── default.py │ ├── idtoken.py │ ├── keypair.py │ ├── no_auth.py │ ├── oauth.py │ ├── oauth_code.py │ ├── oauth_credentials.py │ ├── okta.py │ ├── pat.py │ ├── usrpwdmfa.py │ ├── webbrowser.py │ └── workload_identity.py │ ├── azure_storage_client.py │ ├── backoff_policies.py │ ├── bind_upload_agent.py │ ├── cache.py │ ├── compat.py │ ├── config_manager.py │ ├── connection.py │ ├── connection_diagnostic.py │ ├── constants.py │ ├── converter.py │ ├── converter_issue23517.py │ ├── converter_null.py │ ├── converter_snowsql.py │ ├── cursor.py │ ├── dbapi.py │ ├── description.py │ ├── direct_file_operation_utils.py │ ├── encryption_util.py │ ├── errorcode.py │ ├── errors.py │ ├── externals_utils │ ├── __init__.py │ └── externals_setup.py │ ├── feature.py │ ├── file_compression_type.py │ ├── file_lock.py │ ├── file_transfer_agent.py │ ├── file_util.py │ ├── 
gcs_storage_client.py │ ├── gzip_decoder.py │ ├── local_storage_client.py │ ├── log_configuration.py │ ├── logging_utils │ ├── __init__.py │ └── filters.py │ ├── nanoarrow_cpp │ ├── ArrowIterator │ │ ├── ArrayConverter.cpp │ │ ├── ArrayConverter.hpp │ │ ├── BinaryConverter.cpp │ │ ├── BinaryConverter.hpp │ │ ├── BooleanConverter.cpp │ │ ├── BooleanConverter.hpp │ │ ├── CArrowChunkIterator.cpp │ │ ├── CArrowChunkIterator.hpp │ │ ├── CArrowIterator.cpp │ │ ├── CArrowIterator.hpp │ │ ├── CArrowTableIterator.cpp │ │ ├── CArrowTableIterator.hpp │ │ ├── DateConverter.cpp │ │ ├── DateConverter.hpp │ │ ├── DecFloatConverter.cpp │ │ ├── DecFloatConverter.hpp │ │ ├── DecimalConverter.cpp │ │ ├── DecimalConverter.hpp │ │ ├── FixedSizeListConverter.cpp │ │ ├── FixedSizeListConverter.hpp │ │ ├── FloatConverter.cpp │ │ ├── FloatConverter.hpp │ │ ├── IColumnConverter.hpp │ │ ├── IntConverter.cpp │ │ ├── IntConverter.hpp │ │ ├── IntervalConverter.cpp │ │ ├── IntervalConverter.hpp │ │ ├── LICENSE.txt │ │ ├── MapConverter.cpp │ │ ├── MapConverter.hpp │ │ ├── ObjectConverter.cpp │ │ ├── ObjectConverter.hpp │ │ ├── Python │ │ │ ├── Common.cpp │ │ │ ├── Common.hpp │ │ │ ├── Helpers.cpp │ │ │ └── Helpers.hpp │ │ ├── SnowflakeType.cpp │ │ ├── SnowflakeType.hpp │ │ ├── StringConverter.cpp │ │ ├── StringConverter.hpp │ │ ├── TimeConverter.cpp │ │ ├── TimeConverter.hpp │ │ ├── TimeStampConverter.cpp │ │ ├── TimeStampConverter.hpp │ │ ├── Util │ │ │ ├── macros.hpp │ │ │ ├── time.cpp │ │ │ └── time.hpp │ │ ├── flatcc.c │ │ ├── flatcc │ │ │ ├── flatcc_accessors.h │ │ │ ├── flatcc_alloc.h │ │ │ ├── flatcc_assert.h │ │ │ ├── flatcc_builder.h │ │ │ ├── flatcc_emitter.h │ │ │ ├── flatcc_endian.h │ │ │ ├── flatcc_epilogue.h │ │ │ ├── flatcc_flatbuffers.h │ │ │ ├── flatcc_identifier.h │ │ │ ├── flatcc_iov.h │ │ │ ├── flatcc_prologue.h │ │ │ ├── flatcc_refmap.h │ │ │ ├── flatcc_rtconfig.h │ │ │ ├── flatcc_types.h │ │ │ ├── flatcc_verifier.h │ │ │ └── portable │ │ │ │ ├── flatcc_portable.h │ │ │ │ ├── paligned_alloc.h │ │ │ │ ├── pattributes.h │ │ │ │ ├── pdiagnostic.h │ │ │ │ ├── pdiagnostic_pop.h │ │ │ │ ├── pdiagnostic_push.h │ │ │ │ ├── pendian.h │ │ │ │ ├── pendian_detect.h │ │ │ │ ├── pinline.h │ │ │ │ ├── pinttypes.h │ │ │ │ ├── portable.h │ │ │ │ ├── portable_basic.h │ │ │ │ ├── pstatic_assert.h │ │ │ │ ├── pstdalign.h │ │ │ │ ├── pstdint.h │ │ │ │ ├── punaligned.h │ │ │ │ ├── pversion.h │ │ │ │ └── pwarnings.h │ │ ├── nanoarrow.c │ │ ├── nanoarrow.h │ │ ├── nanoarrow.hpp │ │ ├── nanoarrow_arrow_iterator.pyx │ │ ├── nanoarrow_device.c │ │ ├── nanoarrow_device.h │ │ ├── nanoarrow_ipc.c │ │ └── nanoarrow_ipc.h │ ├── Logging │ │ ├── logging.cpp │ │ └── logging.hpp │ └── scripts │ │ ├── .clang-format │ │ └── format.sh │ ├── network.py │ ├── ocsp_asn1crypto.py │ ├── ocsp_snowflake.py │ ├── options.py │ ├── pandas_tools.py │ ├── proxy.py │ ├── py.typed │ ├── result_batch.py │ ├── result_set.py │ ├── s3_storage_client.py │ ├── secret_detector.py │ ├── sf_dirs.py │ ├── sfbinaryformat.py │ ├── sfdatetime.py │ ├── snow_logging.py │ ├── sqlstate.py │ ├── ssd_internal_keys.py │ ├── ssl_wrap_socket.py │ ├── storage_client.py │ ├── telemetry.py │ ├── telemetry_oob.py │ ├── test_util.py │ ├── time_util.py │ ├── token_cache.py │ ├── tool │ ├── __init__.py │ ├── dump_certs.py │ ├── dump_ocsp_response.py │ ├── dump_ocsp_response_cache.py │ └── probe_connection.py │ ├── url_util.py │ ├── util_text.py │ ├── vendored │ ├── __init__.py │ ├── requests │ │ ├── LICENSE │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── _internal_utils.py │ │ 
├── adapters.py │ │ ├── api.py │ │ ├── auth.py │ │ ├── certs.py │ │ ├── compat.py │ │ ├── cookies.py │ │ ├── exceptions.py │ │ ├── help.py │ │ ├── hooks.py │ │ ├── models.py │ │ ├── sessions.py │ │ ├── status_codes.py │ │ ├── structures.py │ │ └── utils.py │ └── urllib3 │ │ ├── LICENSE.txt │ │ ├── __init__.py │ │ ├── _collections.py │ │ ├── _version.py │ │ ├── connection.py │ │ ├── connectionpool.py │ │ ├── contrib │ │ ├── __init__.py │ │ ├── _appengine_environ.py │ │ ├── _securetransport │ │ │ ├── __init__.py │ │ │ ├── bindings.py │ │ │ └── low_level.py │ │ ├── appengine.py │ │ ├── ntlmpool.py │ │ ├── pyopenssl.py │ │ ├── securetransport.py │ │ └── socks.py │ │ ├── exceptions.py │ │ ├── fields.py │ │ ├── filepost.py │ │ ├── packages │ │ ├── __init__.py │ │ ├── backports │ │ │ ├── __init__.py │ │ │ ├── makefile.py │ │ │ └── weakref_finalize.py │ │ └── six.py │ │ ├── poolmanager.py │ │ ├── request.py │ │ ├── response.py │ │ └── util │ │ ├── __init__.py │ │ ├── connection.py │ │ ├── proxy.py │ │ ├── queue.py │ │ ├── request.py │ │ ├── response.py │ │ ├── retry.py │ │ ├── ssl_.py │ │ ├── ssl_match_hostname.py │ │ ├── ssltransport.py │ │ ├── timeout.py │ │ ├── url.py │ │ └── wait.py │ ├── version.py │ └── wif_util.py ├── test ├── README.md ├── __init__.py ├── auth │ ├── __init__.py │ ├── authorization_parameters.py │ ├── authorization_test_helper.py │ ├── test_external_browser.py │ ├── test_key_pair.py │ ├── test_oauth.py │ ├── test_okta.py │ ├── test_okta_authorization_code.py │ ├── test_okta_client_credentials.py │ ├── test_pat.py │ ├── test_snowflake_authorization_code.py │ └── test_snowflake_authorization_code_wildcards.py ├── conftest.py ├── csp_helpers.py ├── data │ ├── ExecPlatform │ │ └── Database │ │ │ └── data │ │ │ ├── orders_100.csv │ │ │ └── orders_101.csv │ ├── TestOrcFile.test1.orc │ ├── brotli_sample.txt.br │ ├── bzip2_sample.txt.bz2 │ ├── cert_tests │ │ ├── incomplete-chain.pem │ │ ├── production │ │ │ ├── addtrust.crt │ │ │ ├── networksolutions.crt │ │ │ ├── snowflakecomputing.crt │ │ │ └── usertrust.crt │ │ └── revoked_certs.pem │ ├── example.json │ ├── gzip_sample.txt.gz │ ├── multiple_statements.sql │ ├── multiple_statements_negative.sql │ ├── nation.impala.parquet │ ├── put_get_1.txt │ ├── rsa_keys │ │ ├── private.pem │ │ ├── privatekey2.pem │ │ ├── public.pem │ │ ├── publickey2.pem │ │ └── rsa_key_encrypted.p8 │ ├── test_arrow_data │ ├── wiremock │ │ └── mappings │ │ │ ├── auth │ │ │ ├── oauth │ │ │ │ ├── authorization_code │ │ │ │ │ ├── browser_timeout_authorization_error.json │ │ │ │ │ ├── external_idp_custom_urls.json │ │ │ │ │ ├── invalid_scope_error.json │ │ │ │ │ ├── invalid_state_error.json │ │ │ │ │ ├── new_tokens_after_failed_refresh.json │ │ │ │ │ ├── successful_auth_after_failed_refresh.json │ │ │ │ │ ├── successful_flow.json │ │ │ │ │ └── token_request_error.json │ │ │ │ ├── client_credentials │ │ │ │ │ ├── successful_auth_after_failed_refresh.json │ │ │ │ │ ├── successful_flow.json │ │ │ │ │ └── token_request_error.json │ │ │ │ └── refresh_token │ │ │ │ │ ├── refresh_failed.json │ │ │ │ │ └── refresh_successful.json │ │ │ └── pat │ │ │ │ ├── invalid_token.json │ │ │ │ └── successful_flow.json │ │ │ └── generic │ │ │ ├── snowflake_disconnect_successful.json │ │ │ ├── snowflake_login_failed.json │ │ │ └── snowflake_login_successful.json │ └── zstd_sample.txt.zst ├── extras │ ├── README.md │ ├── __init__.py │ ├── run.py │ └── simple_select1.py ├── generate_test_files.py ├── helpers.py ├── integ │ ├── __init__.py │ ├── conftest.py │ ├── lambda │ │ ├── 
__init__.py │ │ └── test_basic_query.py │ ├── pandas │ │ ├── __init__.py │ │ ├── test_arrow_chunk_iterator.py │ │ ├── test_arrow_pandas.py │ │ ├── test_error_arrow_pandas_stream.py │ │ ├── test_logging.py │ │ ├── test_pandas_tools.py │ │ ├── test_unit_arrow_chunk_iterator.py │ │ └── test_unit_options.py │ ├── sso │ │ ├── __init__.py │ │ ├── test_connection_manual.py │ │ ├── test_unit_mfa_cache.py │ │ └── test_unit_sso_connection.py │ ├── test_arrow_result.py │ ├── test_async.py │ ├── test_autocommit.py │ ├── test_bindings.py │ ├── test_boolean.py │ ├── test_client_session_keep_alive.py │ ├── test_concurrent_create_objects.py │ ├── test_concurrent_insert.py │ ├── test_connection.py │ ├── test_converter.py │ ├── test_converter_more_timestamp.py │ ├── test_converter_null.py │ ├── test_cursor.py │ ├── test_cursor_binding.py │ ├── test_cursor_context_manager.py │ ├── test_dataintegrity.py │ ├── test_daylight_savings.py │ ├── test_dbapi.py │ ├── test_decfloat.py │ ├── test_direct_file_operation_utils.py │ ├── test_easy_logging.py │ ├── test_errors.py │ ├── test_execute_multi_statements.py │ ├── test_key_pair_authentication.py │ ├── test_large_put.py │ ├── test_large_result_set.py │ ├── test_load_unload.py │ ├── test_multi_statement.py │ ├── test_network.py │ ├── test_numpy_binding.py │ ├── test_pickle_timestamp_tz.py │ ├── test_put_get.py │ ├── test_put_get_compress_enc.py │ ├── test_put_get_medium.py │ ├── test_put_get_snow_4525.py │ ├── test_put_get_user_stage.py │ ├── test_put_get_with_aws_token.py │ ├── test_put_get_with_azure_token.py │ ├── test_put_get_with_gcp_account.py │ ├── test_put_windows_path.py │ ├── test_qmark.py │ ├── test_query_cancelling.py │ ├── test_results.py │ ├── test_reuse_cursor.py │ ├── test_session_parameters.py │ ├── test_snowsql_timestamp_format.py │ ├── test_statement_parameter_binding.py │ ├── test_structured_types.py │ ├── test_transaction.py │ └── test_vendored_urllib.py ├── integ_helpers.py ├── lazy_var.py ├── randomize.py ├── stress │ ├── README.md │ ├── __init__.py │ ├── dev_requirements.txt │ ├── e2e_iterator.py │ ├── local_iterator.py │ ├── stress_test_data │ │ ├── README.md │ │ ├── test_data_all_types │ │ └── test_multi_column_row_decimal_data │ └── util.py ├── unit │ ├── __init__.py │ ├── conftest.py │ ├── mock_utils.py │ ├── test_auth.py │ ├── test_auth_callback_server.py │ ├── test_auth_keypair.py │ ├── test_auth_mfa.py │ ├── test_auth_no_auth.py │ ├── test_auth_oauth.py │ ├── test_auth_oauth_auth_code.py │ ├── test_auth_okta.py │ ├── test_auth_webbrowser.py │ ├── test_auth_workload_identity.py │ ├── test_backoff_policies.py │ ├── test_binaryformat.py │ ├── test_bind_upload_agent.py │ ├── test_cache.py │ ├── test_compute_chunk_size.py │ ├── test_configmanager.py │ ├── test_connection.py │ ├── test_connection_diagnostic.py │ ├── test_construct_hostname.py │ ├── test_converter.py │ ├── test_cursor.py │ ├── test_datetime.py │ ├── test_dbapi.py │ ├── test_dependencies.py │ ├── test_easy_logging.py │ ├── test_encryption_util.py │ ├── test_error_arrow_stream.py │ ├── test_errors.py │ ├── test_gcs_client.py │ ├── test_linux_local_file_cache.py │ ├── test_local_storage_client.py │ ├── test_log_secret_detector.py │ ├── test_mfa_no_cache.py │ ├── test_network.py │ ├── test_oauth_token.py │ ├── test_ocsp.py │ ├── test_oob_secret_detector.py │ ├── test_parse_account.py │ ├── test_programmatic_access_token.py │ ├── test_proxies.py │ ├── test_put_get.py │ ├── test_query_context_cache.py │ ├── test_renew_session.py │ ├── test_result_batch.py │ ├── test_retry_network.py 
│ ├── test_s3_util.py │ ├── test_session_manager.py │ ├── test_split_statement.py │ ├── test_storage_client.py │ ├── test_telemetry.py │ ├── test_telemetry_oob.py │ ├── test_text_util.py │ ├── test_url_util.py │ ├── test_util.py │ └── test_wiremock_client.py └── wiremock │ ├── __init__.py │ └── wiremock_utils.py ├── tested_requirements ├── README.md ├── requirements_310.reqs ├── requirements_311.reqs ├── requirements_312.reqs ├── requirements_313.reqs └── requirements_39.reqs └── tox.ini /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Since version 2.23 (released in August 2019), git-blame has a feature 2 | # to ignore or bypass certain commits. 3 | # 4 | # This file contains a list of commits that are not likely what you 5 | # are looking for in a blame, such as mass reformatting or renaming. 6 | # You can set this file as a default ignore file for blame by running 7 | # the following command. 8 | # 9 | # $ git config blame.ignoreRevsFile .git-blame-ignore-revs 10 | 11 | # Format of files with psf/black 12 | 998940692da07a0c2984f1963ace71731dcc11bc 13 | 14 | # License header update(s) 15 | d695d7d159ea94d6211199b7ff40cbc66f5a1dde 16 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | @snowflakedb/snow-drivers-warsaw 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 💡 2 | description: Suggest a new idea for the project. 3 | labels: ["feature", "needs triage"] 4 | 5 | body: 6 | - type: textarea 7 | id: current-behavior 8 | attributes: 9 | label: What is the current behavior? 10 | validations: 11 | required: true 12 | - type: textarea 13 | id: desired-behavior 14 | attributes: 15 | label: What is the desired behavior? 16 | validations: 17 | required: true 18 | - type: textarea 19 | id: how-improve 20 | attributes: 21 | label: How would this improve `snowflake-connector-python`? 22 | validations: 23 | required: true 24 | - type: textarea 25 | id: others 26 | attributes: 27 | label: References and other background 28 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | assignees: 8 | - "sfc-gh-mkeller" 9 | reviewers: 10 | - "snowflakedb/snowpark-python-api" 11 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Please answer these questions before submitting your pull requests. Thanks! 2 | 3 | 1. What GitHub issue is this PR addressing? Make sure that there is an accompanying issue to your PR. 4 | 5 | Fixes #NNNN 6 | 7 | 2. 
Fill out the following pre-review checklist: 8 | 9 | - [ ] I am adding a new automated test(s) to verify correctness of my new code 10 | - [ ] I am adding new logging messages 11 | - [ ] I am adding a new telemetry message 12 | - [ ] I am modifying authorization mechanisms 13 | - [ ] I am adding new credentials 14 | - [ ] I am modifying OCSP code 15 | - [ ] I am adding a new dependency 16 | 17 | 3. Please describe how your code solves the related issue. 18 | 19 | Please write a short description of how your code change solves the related issue. 20 | 21 | 4. (Optional) PR for stored-proc connector: 22 | -------------------------------------------------------------------------------- /.github/repo_meta.yaml: -------------------------------------------------------------------------------- 1 | point_of_contact: @snowflakedb/client 2 | production: true 3 | code_owners_file_present: true 4 | release_branches: 5 | - main 6 | jira_area: Snowpark: Clients and Libraries 7 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog Check 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, labeled, unlabeled] 6 | branches: 7 | - main 8 | 9 | jobs: 10 | check_change_log: 11 | runs-on: ubuntu-latest 12 | if: ${{!contains(github.event.pull_request.labels.*.name, 'NO-CHANGELOG-UPDATES')}} 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v3 16 | with: 17 | fetch-depth: 0 18 | 19 | - name: Ensure DESCRIPTION.md is updated 20 | run: git diff --name-only --diff-filter=ACMRT ${{ github.event.pull_request.base.sha }} ${{ github.sha }} | grep -wq "DESCRIPTION.md" 21 | -------------------------------------------------------------------------------- /.github/workflows/cla_bot.yml: -------------------------------------------------------------------------------- 1 | name: "CLA Assistant" 2 | on: 3 | issue_comment: 4 | types: [created] 5 | pull_request_target: 6 | types: [opened,closed,synchronize] 7 | 8 | jobs: 9 | CLAssistant: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | actions: write 13 | contents: write 14 | pull-requests: write 15 | statuses: write 16 | steps: 17 | - name: "CLA Assistant" 18 | if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' 19 | uses: contributor-assistant/github-action/@master 20 | env: 21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_BOT_TOKEN }} 23 | with: 24 | path-to-signatures: 'signatures/version1.json' 25 | path-to-document: 'https://github.com/snowflakedb/CLA/blob/main/README.md' 26 | branch: 'main' 27 | allowlist: 'dependabot[bot],github-actions,Jenkins User,sfc-gh-snyk-sca-sa' 28 | remote-organization-name: 'snowflakedb' 29 | remote-repository-name: 'cla-db' 30 | -------------------------------------------------------------------------------- /.github/workflows/jira_close.yml: -------------------------------------------------------------------------------- 1 | name: Jira closure 2 | 3 | on: 4 | issues: 5 | types: [closed, deleted] 6 | 7 | jobs: 8 | close-issue: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v3 13 | with: 14 | repository: snowflakedb/gh-actions 15 | ref: jira_v1 16 | token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} # stored in GitHub secrets 17 | path: . 
18 | - name: Jira login 19 | uses: atlassian/gajira-login@master 20 | env: 21 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 22 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 23 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 24 | - name: Extract issue from title 25 | id: extract 26 | env: 27 | TITLE: "${{ github.event.issue.title }}" 28 | run: | 29 | jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') 30 | echo ::set-output name=jira::$jira 31 | - name: Close issue 32 | uses: ./jira/gajira-close 33 | if: startsWith(steps.extract.outputs.jira, 'SNOW-') 34 | with: 35 | issue: "${{ steps.extract.outputs.jira }}" 36 | -------------------------------------------------------------------------------- /.github/workflows/jira_comment.yml: -------------------------------------------------------------------------------- 1 | name: Jira comment 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | jobs: 8 | comment-issue: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Jira login 12 | uses: atlassian/gajira-login@master 13 | env: 14 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 15 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 16 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 17 | - name: Extract issue from title 18 | id: extract 19 | env: 20 | TITLE: "${{ github.event.issue.title }}" 21 | run: | 22 | jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') 23 | echo ::set-output name=jira::$jira 24 | - name: Comment on issue 25 | uses: atlassian/gajira-comment@master 26 | if: startsWith(steps.extract.outputs.jira, 'SNOW-') 27 | with: 28 | issue: "${{ steps.extract.outputs.jira }}" 29 | comment: "${{ github.event.comment.user.login }} commented:\n\n${{ github.event.comment.body }}\n\n${{ github.event.comment.html_url }}" 30 | -------------------------------------------------------------------------------- /.github/workflows/parameters/private/jenkins_test_parameters.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/jenkins_test_parameters.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_aws.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_aws.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_aws_auth_tests.json.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_aws_auth_tests.json.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_aws_jenkins.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_aws_jenkins.py.gpg -------------------------------------------------------------------------------- 
/.github/workflows/parameters/private/parameters_azure.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_azure.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_azure_jenkins.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_azure_jenkins.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_gcp.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_gcp.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/parameters_gcp_jenkins.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/parameters_gcp_jenkins.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/rsa_keys/rsa_key.p8.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/rsa_keys/rsa_key.p8.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/private/rsa_keys/rsa_key_invalid.p8.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/private/rsa_keys/rsa_key_invalid.p8.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/public/parameters_aws.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/public/parameters_aws.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/public/parameters_azure.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/public/parameters_azure.py.gpg -------------------------------------------------------------------------------- /.github/workflows/parameters/public/parameters_gcp.py.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.github/workflows/parameters/public/parameters_gcp.py.gpg 
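The .gpg files above are GPG-encrypted CI test parameters; CI decrypts them at runtime with a passphrase held in secrets. A minimal sketch of that decryption step, mirroring the gpg invocation used in ci/test_authentication.sh further down (it assumes the passphrase is exported as PARAMETERS_SECRET):

# Decrypt one encrypted parameters file in place (sketch; PARAMETERS_SECRET must be set)
gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" \
    --output .github/workflows/parameters/public/parameters_aws.py \
    .github/workflows/parameters/public/parameters_aws.py.gpg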
-------------------------------------------------------------------------------- /.github/workflows/port_changes_to_sp.yml: -------------------------------------------------------------------------------- 1 | name: Port Changes to SP 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, labeled, unlabeled] 6 | branches: 7 | - main 8 | 9 | jobs: 10 | port_changes_to_sp: 11 | runs-on: ubuntu-latest 12 | if: ${{!contains(github.event.pull_request.labels.*.name, 'DO_NOT_PORT_CHANGES_TO_SP')}} 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v3 16 | with: 17 | fetch-depth: 0 18 | 19 | - name: Check PR description and labels 20 | run: | 21 | regex_pattern=".*https://github.com/snowflakedb/Stored-Proc-Python-Connector/pull/[0-9]+.*" 22 | description=$(jq -r '.pull_request.body' $GITHUB_EVENT_PATH) 23 | 24 | if [[ ! $description =~ $regex_pattern ]]; then 25 | echo "Error: PR description must link the corresponding Stored-Proc-Python-Connector PR, or the PR must carry the DO_NOT_PORT_CHANGES_TO_SP label" 26 | exit 1 27 | fi 28 | -------------------------------------------------------------------------------- /.github/workflows/semgrep.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Run semgrep checks 3 | 4 | on: 5 | pull_request: 6 | branches: [main] 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | run-semgrep-reusable-workflow: 13 | uses: snowflakedb/reusable-workflows/.github/workflows/semgrep-v2.yml@main 14 | secrets: 15 | token: ${{ secrets.SEMGREP_APP_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/workflows/snyk-issue.yml: -------------------------------------------------------------------------------- 1 | name: Snyk Issue 2 | 3 | on: 4 | schedule: 5 | - cron: '0 */12 * * *' 6 | 7 | permissions: 8 | contents: read 9 | issues: write 10 | pull-requests: write 11 | 12 | concurrency: snyk-issue 13 | 14 | jobs: 15 | snyk: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: checkout action 19 | uses: actions/checkout@v4 20 | with: 21 | repository: snowflakedb/whitesource-actions 22 | token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }} 23 | path: whitesource-actions 24 | - name: set-env 25 | run: echo "REPO=$(basename $GITHUB_REPOSITORY)" >> $GITHUB_ENV 26 | - name: Jira Creation 27 | uses: ./whitesource-actions/snyk-issue 28 | with: 29 | snyk_org: ${{ secrets.SNYK_ORG_ID_PUBLIC_REPO }} 30 | snyk_token: ${{ secrets.SNYK_GITHUB_INTEGRATION_TOKEN_PUBLIC_REPO }} 31 | jira_token: ${{ secrets.JIRA_TOKEN_PUBLIC_REPO }} 32 | env: 33 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 34 | -------------------------------------------------------------------------------- /.github/workflows/snyk-pr.yml: -------------------------------------------------------------------------------- 1 | name: Snyk PR 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | 7 | permissions: 8 | contents: read 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | snyk: 14 | runs-on: ubuntu-latest 15 | if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }} 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v3 19 | with: 20 | ref: ${{ github.event.pull_request.head.ref }} 21 | fetch-depth: 0 22 | 23 | - name: Checkout Action 24 | uses: actions/checkout@v3 25 | with: 26 | repository: snowflakedb/whitesource-actions 27 | token: ${{ secrets.whitesource_action_token }} 28 | path: whitesource-actions 29 | 30 | - name: Snyk Pull Request Scan Check 31 | uses: ./whitesource-actions/snyk-pr 32 | env: 33 | pr_title: ${{
github.event.pull_request.title }} 34 | with: 35 | jira_token: ${{ secrets.jira_token_public_repo }} 36 | gh_token: ${{ secrets.github_token }} 37 | amend: false 38 | -------------------------------------------------------------------------------- /.github/workflows/stale_issue_bot.yml: -------------------------------------------------------------------------------- 1 | name: Close Stale Issues 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | staleDays: 6 | required: true 7 | 8 | 9 | jobs: 10 | stale: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/stale@v7 14 | with: 15 | close-issue-message: 'To clean up and re-prioritize bugs and feature requests we are closing all issues older than 6 months as of March 1, 2023. If there are any issues or feature requests that you would like us to address, please re-create them. For urgent issues, opening a support case with this link [Snowflake Community](https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge) is the fastest way to get a response' 16 | days-before-issue-stale: ${{ inputs.staleDays }} 17 | days-before-pr-stale: -1 18 | # Stale issues are closed immediately 19 | days-before-issue-close: 0 20 | days-before-pr-close: -1 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /.wiremock/ca-cert.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/.wiremock/ca-cert.jks -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to snowflake-connector-python 2 | 3 | Hi, thank you for taking the time to improve Snowflake's Python connector! 4 | 5 | ## I have a feature request or a bug report to submit 6 | 7 | Many questions can be answered by checking our [docs](https://docs.snowflake.com/) or looking for existing bug reports and enhancement requests on our [issue tracker](https://github.com/snowflakedb/snowflake-connector-python/issues). 8 | 9 | Please start by checking these first! 10 | 11 | ## Nobody else had my idea/issue 12 | 13 | In that case we'd love to hear from you! 14 | Please [open a new issue](https://github.com/snowflakedb/snowflake-connector-python/issues/new/choose) to get in touch with us. 15 | 16 | ## I'd like to contribute the bug fix or feature myself 17 | 18 | We encourage everyone to first open an issue to discuss any feature work or bug fixes with one of the maintainers. 19 | This should help guide contributors through potential pitfalls. 20 | 21 | ## Contributor License Agreement ("CLA") 22 | 23 | We require our contributors to sign a CLA, available at https://github.com/snowflakedb/CLA/blob/main/README.md. A GitHub Actions bot will assist you when you open a pull request. 24 | 25 | ### Set up a development environment 26 | 27 | What is a development environment? It's a [virtualenv](https://virtualenv.pypa.io) that has all of the necessary 28 | dependencies installed, with `snowflake-connector-python` installed as an editable package. 29 | 30 | Setting up a development environment is super easy with this [one simple tox command](https://tox.wiki/en/latest/example/devenv.html). 31 | 32 | ```shell 33 | tox --devenv venv37 -e py37 34 | . 
venv37/bin/activate 35 | ``` 36 | 37 | Note: we suggest using the lowest supported Python version for development. 38 | 39 | To run tests, please see our [testing README](test/README.md). 40 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include *.rst 3 | include LICENSE.txt 4 | include NOTICE 5 | include pyproject.toml 6 | include src/snowflake/connector/nanoarrow_cpp/ArrowIterator/LICENSE.txt 7 | recursive-include src/snowflake/connector py.typed *.py *.pyx 8 | recursive-include src/snowflake/connector/vendored LICENSE* 9 | 10 | recursive-include src/snowflake/connector/nanoarrow_cpp *.cpp *.hpp 11 | recursive-include src/snowflake/connector/nanoarrow_cpp *.c *.h 12 | exclude src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_arrow_iterator.cpp 13 | exclude src/snowflake/connector/nanoarrow_cpp/scripts/.clang-format 14 | exclude src/snowflake/connector/nanoarrow_cpp/scripts/format.sh 15 | 16 | exclude .git-blame-ignore-revs 17 | exclude .pre-commit-config.yaml 18 | exclude license_header.txt 19 | exclude tox.ini 20 | exclude mypy.ini 21 | exclude .clang-format 22 | exclude .wiremock/* 23 | 24 | prune ci 25 | prune benchmark 26 | prune test 27 | prune tested_requirements 28 | prune src/snowflake/connector/nanoarrow_cpp/scripts 29 | prune __pycache__ 30 | prune samples 31 | prune prober 32 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Snowflake Python Connector 2 | Copyright 2020 Snowflake Inc. 3 | 4 | This software includes software derived from urllib3, licensed under the MIT license (https://urllib3.readthedocs.io). 5 | Copyright (c) 2008-2020 Andrey Petrov and contributors 6 | 7 | This software includes software derived from Requests: HTTP For Humans, licensed under the Apache license, developed by the Python Software Foundation (https://requests.readthedocs.io/) 8 | Requests Copyright 2019 Kenneth Reitz 9 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | 4 | Please refer to the Snowflake [HackerOne program](https://hackerone.com/snowflake?type=team) for our security policies and for reporting any security vulnerabilities. 
5 | 6 | For other security-related questions and concerns, please contact the Snowflake security team at security@snowflake.com. 7 | -------------------------------------------------------------------------------- /benchmark/benchmark_unit_converter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import annotations 4 | 5 | from logging import getLogger 6 | 7 | from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL 8 | 9 | logger = getLogger(__name__) 10 | 11 | ConverterSnowSQL = SnowflakeConverterSnowSQL 12 | 13 | 14 | def test_benchmark_date_converter(): 15 | conv = ConverterSnowSQL(support_negative_year=True) 16 | conv.set_parameter("DATE_OUTPUT_FORMAT", "YY-MM-DD") 17 | m = conv.to_python_method("DATE", {"scale": 0}) 18 | current_date_counter = 12345 19 | for _ in range(2000000): 20 | m(current_date_counter) 21 | 22 | 23 | def test_benchmark_date_without_negative_converter(): 24 | conv = ConverterSnowSQL(support_negative_year=False) 25 | conv.set_parameter("DATE_OUTPUT_FORMAT", "YY-MM-DD") 26 | m = conv.to_python_method("DATE", {"scale": 0}) 27 | current_date_counter = 12345 28 | for _ in range(2000000): 29 | m(current_date_counter) 30 | 31 | 32 | def test_benchmark_timestamp_converter(): 33 | conv = ConverterSnowSQL(support_negative_year=True) 34 | conv.set_parameter("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF9") 35 | m = conv.to_python_method("TIMESTAMP_NTZ", {"scale": 9}) 36 | current_timestamp = "2208943503.876543211" 37 | for _ in range(2000000): 38 | m(current_timestamp) 39 | 40 | 41 | def test_benchmark_timestamp_without_negative_converter(): 42 | conv = ConverterSnowSQL(support_negative_year=False) 43 | conv.set_parameter("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF9") 44 | m = conv.to_python_method("TIMESTAMP_NTZ", {"scale": 9}) 45 | current_timestamp = "2208943503.876543211" 46 | for _ in range(2000000): 47 | m(current_timestamp) 48 | -------------------------------------------------------------------------------- /ci/anaconda/bld.bat: -------------------------------------------------------------------------------- 1 | %PYTHON% setup.py install 2 | -------------------------------------------------------------------------------- /ci/anaconda/build.sh: -------------------------------------------------------------------------------- 1 | $PYTHON setup.py install --single-version-externally-managed --record=record.txt 2 | -------------------------------------------------------------------------------- /ci/anaconda/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: snowflake_connector_python 3 | version: "1.2.3" 4 | 5 | source: 6 | path: /tmp/anaconda_workspace/src 7 | 8 | requirements: 9 | build: 10 | - python 11 | - setuptools 12 | 13 | run: 14 | - python 15 | - boto3 ==1.3.1 16 | - botocore ==1.4.26 17 | - future 18 | - six 19 | - pytz 20 | - pycrypto ==2.6.1 21 | - pyopenssl ==0.15.1 22 | - cryptography ==1.2.3 23 | - cffi ==1.6.0 24 | 25 | about: 26 | home: https://www.snowflake.com/ 27 | license: Apache 2.0 28 | license_file: /tmp/anaconda_workspace/src/LICENSE.txt 29 | summary: Snowflake Connector for Python 30 | -------------------------------------------------------------------------------- /ci/build_darwin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Build Snowflake Python Connector on Mac 4 | # NOTES: 5 | # - 
To compile only a specific version(s) pass in versions like: `./build_darwin.sh "3.9 3.10"` 6 | PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" 7 | 8 | THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | CONNECTOR_DIR="$(dirname "${THIS_DIR}")" 10 | DIST_DIR="$CONNECTOR_DIR/dist" 11 | 12 | cd $CONNECTOR_DIR 13 | # Clean up previously built DIST_DIR 14 | if [ -d "${DIST_DIR}" ]; then 15 | echo "[WARN] ${DIST_DIR} already exists, deleting it..." 16 | rm -rf "${DIST_DIR}" 17 | fi 18 | mkdir -p ${DIST_DIR} 19 | 20 | # Make sure we build for our lowest target 21 | # Should be kept in sync with .github/workflows/build_test.yml 22 | export MACOSX_DEPLOYMENT_TARGET="10.14" 23 | for PYTHON_VERSION in ${PYTHON_VERSIONS}; do 24 | # Constants and setup 25 | PYTHON="python${PYTHON_VERSION}" 26 | VENV_DIR="${CONNECTOR_DIR}/venv-${PYTHON_VERSION}" 27 | 28 | # Need to create a venv to update build dependencies 29 | ${PYTHON} -m venv ${VENV_DIR} 30 | source ${VENV_DIR}/bin/activate 31 | echo "[Info] Created and activated new venv at ${VENV_DIR}" 32 | 33 | # Build 34 | echo "[Info] Creating a wheel: snowflake_connector using $PYTHON" 35 | # Clean up possible build artifacts 36 | rm -rf build generated_version.py 37 | # Update PEP-517 dependencies 38 | python -m pip install -U pip setuptools wheel build 39 | # Use new PEP-517 build 40 | python -m build --wheel . 41 | deactivate 42 | echo "[Info] Deleting venv at ${VENV_DIR}" 43 | rm -rf ${VENV_DIR} 44 | done 45 | -------------------------------------------------------------------------------- /ci/build_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Build Snowflake Python Connector in Docker 4 | # NOTES: 5 | # - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.9 3.10"` 6 | set -o pipefail 7 | 8 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 9 | source $THIS_DIR/set_base_image.sh 10 | CONNECTOR_DIR="$( dirname "${THIS_DIR}")" 11 | 12 | mkdir -p $CONNECTOR_DIR/dist 13 | cd $THIS_DIR/docker/connector_build 14 | 15 | CONTAINER_NAME=build_pyconnector 16 | arch=$(uname -p) 17 | 18 | echo "[Info] Building docker image" 19 | if [[ "$arch" == "aarch64" ]]; then 20 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014AARCH64 21 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-arm64 22 | else 23 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014 24 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 25 | fi 26 | 27 | docker build --pull -t ${CONTAINER_NAME}:1.0 --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" . 
-f Dockerfile 28 | 29 | echo "[Info] Building Python Connector" 30 | user_id=$(id -u ${USER}) 31 | docker run \ 32 | -e TERM=vt102 \ 33 | -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ 34 | -e LOCAL_USER_ID=${user_id} \ 35 | --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \ 36 | ${CONTAINER_NAME}:1.0 \ 37 | /home/user/snowflake-connector-python/ci/build_linux.sh $1 38 | -------------------------------------------------------------------------------- /ci/build_windows.bat: -------------------------------------------------------------------------------- 1 | :: 2 | :: Build Snowflake Python Connector on Windows 3 | :: NOTES: 4 | :: - This is designed to ONLY be called in our Windows workers in Jenkins 5 | :: - To restrict what version gets created edit this file 6 | SET SCRIPT_DIR=%~dp0 7 | SET CONNECTOR_DIR=%~dp0\..\ 8 | 9 | set python_versions= 3.9 3.10 3.11 3.12 3.13 10 | 11 | cd %CONNECTOR_DIR% 12 | 13 | set venv_dir=%WORKSPACE%\venv-flake8 14 | if %errorlevel% neq 0 goto :error 15 | 16 | py -3.9 -m venv %venv_dir% 17 | if %errorlevel% neq 0 goto :error 18 | 19 | call %venv_dir%\scripts\activate 20 | if %errorlevel% neq 0 goto :error 21 | 22 | python -m pip install --upgrade pip awscli setuptools wheel 23 | if %errorlevel% neq 0 goto :error 24 | 25 | (for %%v in (%python_versions%) do ( 26 | call :build_wheel_file %%v || goto :error 27 | )) 28 | 29 | call deactivate 30 | 31 | dir dist 32 | 33 | EXIT /B %ERRORLEVEL% 34 | 35 | :build_wheel_file 36 | set pv=%~1 37 | 38 | echo Going to compile wheel for Python %pv% 39 | py -%pv% -m pip install --upgrade pip setuptools wheel build 40 | if %errorlevel% neq 0 goto :error 41 | 42 | py -%pv% -m build --wheel . 43 | if %errorlevel% neq 0 goto :error 44 | 45 | EXIT /B 0 46 | 47 | :error 48 | exit /b %errorlevel% 49 | -------------------------------------------------------------------------------- /ci/change_snowflake_test_pwd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Set a complex password for test user snowman 4 | # 5 | from __future__ import annotations 6 | 7 | import os 8 | import sys 9 | 10 | import snowflake.connector 11 | 12 | sys.path.append( 13 | os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "test") 14 | ) 15 | 16 | CLIENT_KNOWN_SSM_FILE_PATH_DOCKER = "CLIENT_KNOWN_SSM_FILE_PATH_DOCKER" 17 | 18 | 19 | def change_password(): 20 | params = { 21 | "account": "", 22 | "user": "", 23 | "password": "", 24 | "database": "", 25 | "schema": "", 26 | "protocol": "https", 27 | "host": "", 28 | "port": "443", 29 | } 30 | 31 | for k, v in CONNECTION_PARAMETERS.items(): 32 | params[k] = v 33 | 34 | conn = snowflake.connector.connect(**params) 35 | conn.cursor().execute("use role accountadmin") 36 | cmd = f"alter user set password = '{SNOWFLAKE_TEST_PASSWORD_NEW}'" 37 | print(cmd) 38 | conn.cursor().execute(cmd) 39 | conn.close() 40 | 41 | 42 | def generate_known_ssm_file(): 43 | with open(os.getenv(CLIENT_KNOWN_SSM_FILE_PATH_DOCKER), "w") as f: 44 | f.write(SNOWFLAKE_TEST_PASSWORD_NEW + "\n") 45 | 46 | 47 | if __name__ == "__main__": 48 | from jenkins_test_parameters import SNOWFLAKE_TEST_PASSWORD_NEW 49 | 50 | from parameters import CONNECTION_PARAMETERS 51 | 52 | change_password() 53 | generate_known_ssm_file() 54 | -------------------------------------------------------------------------------- /ci/container/test_authentication.sh: -------------------------------------------------------------------------------- 1 | 
#!/bin/bash -e 2 | 3 | set -o pipefail 4 | 5 | 6 | export WORKSPACE=${WORKSPACE:-/mnt/workspace} 7 | export SOURCE_ROOT=${SOURCE_ROOT:-/mnt/host} 8 | 9 | MVNW_EXE=$SOURCE_ROOT/mvnw 10 | AUTH_PARAMETER_FILE=./.github/workflows/parameters/private/parameters_aws_auth_tests.json 11 | eval $(jq -r '.authtestparams | to_entries | map("export \(.key)=\(.value|tostring)")|.[]' $AUTH_PARAMETER_FILE) 12 | 13 | export SNOWFLAKE_AUTH_TEST_PRIVATE_KEY_PATH=./.github/workflows/parameters/private/rsa_keys/rsa_key.p8 14 | export SNOWFLAKE_AUTH_TEST_INVALID_PRIVATE_KEY_PATH=./.github/workflows/parameters/private/rsa_keys/rsa_key_invalid.p8 15 | 16 | export SF_OCSP_TEST_MODE=true 17 | export SF_ENABLE_EXPERIMENTAL_AUTHENTICATION=true 18 | export RUN_AUTH_TESTS=true 19 | export AUTHENTICATION_TESTS_ENV="docker" 20 | export PYTHONPATH=$SOURCE_ROOT 21 | 22 | python3 -m pip install --break-system-packages -e . 23 | 24 | python3 -m pytest test/auth/* 25 | -------------------------------------------------------------------------------- /ci/docker/connector_build/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE=quay.io/pypa/manylinux2014_x86_64 2 | FROM $BASE_IMAGE 3 | 4 | # This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ 5 | ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 6 | ENV GOSU_PATH $GOSU_URL 7 | RUN curl -o /usr/local/bin/gosu -SL $GOSU_PATH 8 | RUN chmod +x /usr/local/bin/gosu 9 | 10 | COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh 11 | RUN chmod +x /usr/local/bin/entrypoint.sh 12 | 13 | WORKDIR /home/user 14 | RUN chmod 777 /home/user 15 | RUN git clone https://github.com/matthew-brett/multibuild.git && cd /home/user/multibuild && git checkout bfc6d8b82d8c37b8ca1e386081fd800e81c6ab4a 16 | 17 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 18 | -------------------------------------------------------------------------------- /ci/docker/connector_build/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add local user 4 | # Either use the LOCAL_USER_ID if passed in at runtime or 5 | # fallback 6 | 7 | USER_ID=${LOCAL_USER_ID:-9001} 8 | 9 | echo "Starting with UID : $USER_ID" 10 | useradd --shell /bin/bash -u $USER_ID -o -c "" -m user 11 | export HOME=/home/user 12 | 13 | /usr/local/bin/gosu user "$@" 14 | -------------------------------------------------------------------------------- /ci/docker/connector_test/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE=quay.io/pypa/manylinux2014_x86_64 2 | FROM $BASE_IMAGE 3 | 4 | RUN yum install -y java-11-openjdk 5 | 6 | # Our dependencies rely on the Rust toolchain being available in the build-time environment (https://github.com/pyca/cryptography/issues/5771) 7 | RUN yum -y install rust cargo 8 | 9 | # This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ 10 | ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 11 | ENV GOSU_PATH $GOSU_URL 12 | RUN curl -o /usr/local/bin/gosu -SL $GOSU_PATH 13 | RUN chmod +x /usr/local/bin/gosu 14 | 15 | COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh 16 | RUN chmod +x /usr/local/bin/entrypoint.sh 17 | 18 | WORKDIR /home/user 19 | RUN chmod 777 /home/user 20 | 21 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 22 | 
-------------------------------------------------------------------------------- /ci/docker/connector_test/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add local user 4 | # Either use the LOCAL_USER_ID if passed in at runtime or 5 | # fallback 6 | 7 | USER_ID=${LOCAL_USER_ID:-9001} 8 | 9 | echo "Starting with UID : $USER_ID" 10 | useradd --shell /bin/bash -u $USER_ID -o -c "" -m user 11 | export HOME=/home/user 12 | 13 | /usr/local/bin/gosu user "$@" 14 | -------------------------------------------------------------------------------- /ci/docker/connector_test_fips/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:8 2 | 3 | # This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ 4 | RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64" 5 | RUN chmod +x /usr/local/bin/gosu 6 | 7 | COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh 8 | RUN chmod +x /usr/local/bin/entrypoint.sh 9 | 10 | WORKDIR /home/user 11 | RUN chmod 777 /home/user 12 | 13 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 14 | 15 | RUN sed -i s/mirror.centos.org/vault.centos.org/g /etc/yum.repos.d/*.repo && \ 16 | sed -i s/^#.*baseurl=http/baseurl=http/g /etc/yum.repos.d/*.repo && \ 17 | sed -i s/^mirrorlist=http/#mirrorlist=http/g /etc/yum.repos.d/*.repo 18 | 19 | RUN yum clean all && \ 20 | yum install -y redhat-rpm-config gcc libffi-devel openssl openssl-devel && \ 21 | yum install -y python39 python39-devel && \ 22 | yum install -y java-11-openjdk && \ 23 | yum clean all && \ 24 | rm -rf /var/cache/yum 25 | RUN python3 -m pip install --user --upgrade pip setuptools wheel 26 | -------------------------------------------------------------------------------- /ci/docker/connector_test_fips/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add local user 4 | # Either use the LOCAL_USER_ID if passed in at runtime or 5 | # fallback 6 | 7 | USER_ID=${LOCAL_USER_ID:-9001} 8 | 9 | echo "Starting with UID : $USER_ID" 10 | useradd --shell /bin/bash -u $USER_ID -o -c "" -m user 11 | export HOME=/home/user 12 | 13 | . 
/opt/rh/rh-python38/enable 14 | /usr/local/bin/gosu user "$@" 15 | -------------------------------------------------------------------------------- /ci/docker/connector_test_lambda/Dockerfile310: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.10-x86_64 2 | 3 | RUN yum install -y git 4 | 5 | WORKDIR /home/user/snowflake-connector-python 6 | RUN chmod 777 /home/user/snowflake-connector-python 7 | ENV PATH="${PATH}:/opt/python/cp310-cp310/bin/" 8 | ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" 9 | 10 | RUN pip3 install -U pip setuptools wheel tox>=4 11 | 12 | CMD [ "app.handler" ] 13 | -------------------------------------------------------------------------------- /ci/docker/connector_test_lambda/Dockerfile311: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.11-x86_64 2 | 3 | RUN yum install -y git 4 | 5 | WORKDIR /home/user/snowflake-connector-python 6 | RUN chmod 777 /home/user/snowflake-connector-python 7 | ENV PATH="${PATH}:/opt/python/cp311-cp311/bin/" 8 | ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" 9 | 10 | RUN pip3 install -U pip setuptools wheel tox>=4 11 | 12 | CMD [ "app.handler" ] 13 | -------------------------------------------------------------------------------- /ci/docker/connector_test_lambda/Dockerfile312: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.12-x86_64 2 | 3 | WORKDIR /home/user/snowflake-connector-python 4 | RUN chmod 777 /home/user/snowflake-connector-python 5 | ENV PATH="${PATH}:/opt/python/cp312-cp312/bin/" 6 | ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" 7 | 8 | RUN pip3 install -U pip setuptools wheel tox>=4 9 | 10 | CMD [ "app.handler" ] 11 | -------------------------------------------------------------------------------- /ci/docker/connector_test_lambda/Dockerfile313: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.13-x86_64 2 | 3 | WORKDIR /home/user/snowflake-connector-python 4 | 5 | RUN dnf -y update && \ 6 | dnf clean all 7 | 8 | # Our dependencies rely on the Rust toolchain being available in the build-time environment (https://github.com/pyca/cryptography/issues/5771) 9 | RUN dnf -y install rust cargo 10 | RUN dnf -y upgrade 11 | 12 | RUN chmod 777 /home/user/snowflake-connector-python 13 | ENV PATH="${PATH}:/opt/python/cp313-cp313/bin/" 14 | ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" 15 | 16 | RUN pip3 install -U pip setuptools wheel tox>=4 17 | 18 | CMD [ "app.handler" ] 19 | -------------------------------------------------------------------------------- /ci/docker/connector_test_lambda/Dockerfile39: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.9-x86_64 2 | 3 | RUN yum install -y git 4 | 5 | WORKDIR /home/user/snowflake-connector-python 6 | RUN chmod 777 /home/user/snowflake-connector-python 7 | ENV PATH="${PATH}:/opt/python/cp39-cp39/bin/" 8 | ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" 9 | 10 | RUN pip3 install -U pip setuptools wheel tox>=4 11 | 12 | CMD [ "app.handler" ] 13 | 
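The connector_test_lambda Dockerfiles above are built on the public.ecr.aws/lambda/python base images, which bundle the AWS Lambda runtime interface emulator, so the configured app.handler can be exercised locally over HTTP. A hedged usage sketch (the image tag, port, and mount path are illustrative and not taken from the repo's CI scripts; the bind mount supplies app.py through the PYTHONPATH set in the image):

# Build the Python 3.12 test image and run it with the repo mounted
docker build -f Dockerfile312 -t connector-test-lambda:312 .
docker run -p 9000:8080 \
    -v /path/to/snowflake-connector-python:/home/user/snowflake-connector-python \
    connector-test-lambda:312
# Invoke app.handler through the Lambda runtime interface emulator
curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}'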
-------------------------------------------------------------------------------- /ci/log_analyze_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # preparation for log analyze 4 | # 5 | 6 | # DOCKER ROOT /home/user/snowflake-connector-python 7 | 8 | export CLIENT_LOG_DIR_PATH_DOCKER=/home/user/snowflake-connector-python/ssm_rt_log 9 | export CLIENT_LOG_DIR_PATH=$WORKSPACE/target_client/ssm_rt_log 10 | echo "[INFO] CLIENT_LOG_DIR_PATH=$CLIENT_LOG_DIR_PATH" 11 | echo "[INFO] CLIENT_LOG_DIR_PATH_DOCKER=$CLIENT_LOG_DIR_PATH_DOCKER" 12 | 13 | export CLIENT_KNOWN_SSM_FILE_PATH_DOCKER=$CLIENT_LOG_DIR_PATH_DOCKER/rt_jenkins_log_known_ssm.txt 14 | export CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_LOG_DIR_PATH/rt_jenkins_log_known_ssm.txt 15 | echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH" 16 | echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH_DOCKER=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER" 17 | 18 | # [required envs] 19 | # To close log analyze, just set ENABLE_CLIENT_LOG_ANALYZE to not "true", e.g. "false". 20 | export ENABLE_CLIENT_LOG_ANALYZE="true" 21 | 22 | # The environment variable used by log analyze module 23 | export CLIENT_DRIVER_NAME=PYTHON 24 | -------------------------------------------------------------------------------- /ci/set_base_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Use the internal docker registry if running on Jenkins 4 | # 5 | set -o pipefail 6 | INTERNAL_REPO=nexus.int.snowflakecomputing.com:8086 7 | if [[ -n "$NEXUS_PASSWORD" ]]; then 8 | echo "[INFO] Pull docker images from $INTERNAL_REPO" 9 | NEXUS_USER=${USERNAME:-jenkins} 10 | docker login --username "$NEXUS_USER" --password "$NEXUS_PASSWORD" $INTERNAL_REPO 11 | export BASE_IMAGE_MANYLINUX2014=nexus.int.snowflakecomputing.com:8086/docker/manylinux2014_x86_64:2025.02.12-1 12 | export BASE_IMAGE_MANYLINUX2014AARCH64=nexus.int.snowflakecomputing.com:8086/docker/manylinux2014_aarch64:2025.02.12-1 13 | else 14 | echo "[INFO] Pull docker images from public registry" 15 | export BASE_IMAGE_MANYLINUX2014=quay.io/pypa/manylinux2014_x86_64 16 | export BASE_IMAGE_MANYLINUX2014AARCH64=quay.io/pypa/manylinux2014_aarch64 17 | fi 18 | -------------------------------------------------------------------------------- /ci/test_authentication.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | set -o pipefail 4 | 5 | 6 | export THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 7 | export WORKSPACE=${WORKSPACE:-/tmp} 8 | 9 | CI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 10 | if [[ -n "$JENKINS_HOME" ]]; then 11 | ROOT_DIR="$(cd "${CI_DIR}/.." 
&& pwd)" 12 | export WORKSPACE=${WORKSPACE:-/tmp} 13 | echo "Use /sbin/ip" 14 | IP_ADDR=$(/sbin/ip -4 addr show scope global dev eth0 | grep inet | awk '{print $2}' | cut -d / -f 1) 15 | 16 | fi 17 | 18 | gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" --output $THIS_DIR/../.github/workflows/parameters/private/parameters_aws_auth_tests.json "$THIS_DIR/../.github/workflows/parameters/private/parameters_aws_auth_tests.json.gpg" 19 | gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" --output $THIS_DIR/../.github/workflows/parameters/private/rsa_keys/rsa_key.p8 "$THIS_DIR/../.github/workflows/parameters/private/rsa_keys/rsa_key.p8.gpg" 20 | gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" --output $THIS_DIR/../.github/workflows/parameters/private/rsa_keys/rsa_key_invalid.p8 "$THIS_DIR/../.github/workflows/parameters/private/rsa_keys/rsa_key_invalid.p8.gpg" 21 | 22 | docker run \ 23 | -v $(cd $THIS_DIR/.. && pwd):/mnt/host \ 24 | -v $WORKSPACE:/mnt/workspace \ 25 | --rm \ 26 | nexus.int.snowflakecomputing.com:8086/docker/snowdrivers-test-external-browser-python:1 \ 27 | "/mnt/host/ci/container/test_authentication.sh" 28 | -------------------------------------------------------------------------------- /ci/test_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # Test Snowflake Python Connector in Docker 3 | # NOTES: 4 | # - By default this script runs Python 3.9 tests, as these are installed in dev vms 5 | # - To compile only a specific version(s) pass in versions like: `./test_docker.sh "3.9 3.10"` 6 | 7 | set -o pipefail 8 | 9 | # In case this is ran from dev-vm 10 | PYTHON_ENV=${1:-3.9} 11 | 12 | # Set constants 13 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 14 | CONNECTOR_DIR="$( dirname "${THIS_DIR}")" 15 | WORKSPACE=${WORKSPACE:-${CONNECTOR_DIR}} 16 | source $THIS_DIR/set_base_image.sh 17 | 18 | cd $THIS_DIR/docker/connector_test 19 | 20 | CONTAINER_NAME=test_pyconnector 21 | arch=$(uname -p) 22 | 23 | echo "[Info] Building docker image" 24 | if [[ "$arch" == "aarch64" ]]; then 25 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014AARCH64 26 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-arm64 27 | else 28 | BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014 29 | GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 30 | fi 31 | 32 | docker build --pull -t ${CONTAINER_NAME}:1.0 --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" . 
-f Dockerfile 33 | 34 | user_id=$(id -u ${USER}) 35 | docker run --network=host \ 36 | -e TERM=vt102 \ 37 | -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ 38 | -e OPENSSL_FIPS=1 \ 39 | -e LOCAL_USER_ID=${user_id} \ 40 | -e AWS_ACCESS_KEY_ID \ 41 | -e AWS_SECRET_ACCESS_KEY \ 42 | -e SF_REGRESS_LOGS \ 43 | -e SF_PROJECT_ROOT \ 44 | -e cloud_provider \ 45 | -e JENKINS_HOME \ 46 | -e is_old_driver \ 47 | -e GITHUB_ACTIONS \ 48 | --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \ 49 | ${CONTAINER_NAME}:1.0 \ 50 | /home/user/snowflake-connector-python/ci/test_linux.sh ${PYTHON_ENV} 51 | -------------------------------------------------------------------------------- /ci/test_fips.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Test Snowflake Connector 4 | # Note this is the script that test_fips_docker.sh runs inside of the docker container 5 | # 6 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 7 | # shellcheck disable=SC1090 8 | CONNECTOR_DIR="$( dirname "${THIS_DIR}")" 9 | CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl | sort -r | head -n 1)" 10 | 11 | # fetch wiremock 12 | curl https://repo1.maven.org/maven2/org/wiremock/wiremock-standalone/3.11.0/wiremock-standalone-3.11.0.jar --output "${CONNECTOR_DIR}/.wiremock/wiremock-standalone.jar" 13 | 14 | python3 -m venv fips_env 15 | source fips_env/bin/activate 16 | pip install -U setuptools pip 17 | pip install "${CONNECTOR_WHL}[pandas,secure-local-storage,development]" 18 | 19 | echo "!!! Environment description !!!" 20 | echo "Default installed OpenSSL version" 21 | openssl version 22 | python -c "import ssl; print('Python openssl library: ' + ssl.OPENSSL_VERSION)" 23 | python -c "from cryptography.hazmat.backends.openssl import backend;print('Cryptography openssl library: ' + backend.openssl_version_text())" 24 | pip freeze 25 | 26 | cd $CONNECTOR_DIR 27 | pytest -vvv --cov=snowflake.connector --cov-report=xml:coverage.xml test 28 | 29 | deactivate 30 | -------------------------------------------------------------------------------- /ci/test_fips_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | 3 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | CONNECTOR_DIR="$( dirname "${THIS_DIR}")" 5 | # In case the wheel was not built yet, e.g. when running locally rather than on Jenkins 6 | 7 | if [[ ! -d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl) == '' ]]; then 8 | echo "Missing wheel files, going to compile Python connector in Docker..." 9 | $THIS_DIR/build_docker.sh 3.9 10 | cp $CONNECTOR_DIR/dist/repaired_wheels/*cp39*manylinux2014*.whl $CONNECTOR_DIR/dist/ 11 | fi 12 | 13 | cd $THIS_DIR/docker/connector_test_fips 14 | 15 | CONTAINER_NAME=test_fips_connector 16 | 17 | echo "[Info] Start building docker image" 18 | docker build -t ${CONTAINER_NAME}:1.0 -f Dockerfile .
19 | 20 | user_id=$(id -u $USER) 21 | docker run --network=host \ 22 | -e LANG=en_US.UTF-8 \ 23 | -e TERM=vt102 \ 24 | -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ 25 | -e LOCAL_USER_ID=$user_id \ 26 | -e CRYPTOGRAPHY_ALLOW_OPENSSL_102=1 \ 27 | -e AWS_ACCESS_KEY_ID \ 28 | -e AWS_SECRET_ACCESS_KEY \ 29 | -e SF_REGRESS_LOGS \ 30 | -e SF_PROJECT_ROOT \ 31 | -e cloud_provider \ 32 | -e PYTEST_ADDOPTS \ 33 | -e GITHUB_ACTIONS \ 34 | --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \ 35 | ${CONTAINER_NAME}:1.0 \ 36 | /home/user/snowflake-connector-python/ci/test_fips.sh $1 37 | -------------------------------------------------------------------------------- /ci/test_windows.bat: -------------------------------------------------------------------------------- 1 | :: 2 | :: Test PythonConnector on Windows 3 | :: 4 | 5 | 6 | SET SCRIPT_DIR=%~dp0 7 | SET CONNECTOR_DIR=%~dp0\..\ 8 | :: E.g.: 35 9 | set pv=%1 10 | 11 | cd %CONNECTOR_DIR% 12 | 13 | dir /b * | findstr ^snowflake_connector_python.*%pv%.*whl$ > whl_name 14 | if %errorlevel% neq 0 goto :error 15 | 16 | set /p connector_whl= test\parameters.py 30 | 31 | :: create tox execution virtual env 32 | set venv_dir=%WORKSPACE%\tox_venv 33 | py -3.9 -m venv %venv_dir% 34 | if %errorlevel% neq 0 goto :error 35 | 36 | call %venv_dir%\scripts\activate 37 | if %errorlevel% neq 0 goto :error 38 | 39 | python -m pip install -U pip "tox>=4" 40 | if %errorlevel% neq 0 goto :error 41 | 42 | cd %CONNECTOR_DIR% 43 | 44 | :: Fetch wiremock 45 | curl https://repo1.maven.org/maven2/org/wiremock/wiremock-standalone/3.11.0/wiremock-standalone-3.11.0.jar --output %CONNECTOR_DIR%\.wiremock\wiremock-standalone.jar 46 | 47 | set JUNIT_REPORT_DIR=%workspace% 48 | set COV_REPORT_DIR=%workspace% 49 | 50 | set TEST_ENVLIST=fix_lint,py%pv%-unit-ci,py%pv%-integ-ci,py%pv%-pandas-ci,py%pv%-sso-ci,py%pv%-coverage 51 | tox -e %TEST_ENVLIST% --installpkg %connector_whl% 52 | if %errorlevel% neq 0 goto :error 53 | 54 | call deactivate 55 | EXIT /B 0 56 | 57 | :error 58 | exit /b %errorlevel% 59 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | follow_imports = silent 3 | 4 | [mypy-snowflake.connector.vendored.*] 5 | ignore_errors = True 6 | ignore_missing_imports = True 7 | 8 | [mypy-test.parameters] 9 | ignore_errors = True 10 | 11 | [mypy-pyarrow.*] 12 | ignore_missing_imports = True 13 | 14 | [mypy-pandas.*] 15 | ignore_missing_imports = True 16 | -------------------------------------------------------------------------------- /prober/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/prober/__init__.py -------------------------------------------------------------------------------- /prober/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Initialize an empty string to hold all parameters 4 | params="" 5 | 6 | # Parse command-line arguments dynamically 7 | while [[ "$#" -gt 0 ]]; do 8 | params="$params $1 $2" 9 | shift 2 10 | done 11 | 12 | # Run main.py with all available virtual environments 13 | for venv in /venvs/*; do 14 | echo "Running main.py with virtual environment: $(basename "$venv")" 15 | source "$venv/bin/activate" 16 | prober $params 17 | deactivate 18 | done 19 | 
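entrypoint.sh pairs up whatever "--flag value" arguments it receives and forwards them to the `prober` console script, which prober/setup.py (below) wires to probes.main:main. main.py itself is not included in this dump; the following is only a sketch of a compatible entry point under that wiring, and the --probe flag name is an assumption:

import argparse

from probes.logging_config import initialize_logger
from probes.registry import PROBES_FUNCTIONS


def main() -> None:
    parser = argparse.ArgumentParser(description="Snowflake connector prober")
    parser.add_argument("--probe", default="login", help="registered probe to run")
    # Tolerate any extra "--key value" pairs forwarded by entrypoint.sh.
    args, _extra = parser.parse_known_args()

    # Note: probe modules must be imported somewhere first so that their
    # @prober_function decorators have run and populated PROBES_FUNCTIONS.
    logger = initialize_logger("prober")
    probe = PROBES_FUNCTIONS.get(args.probe)
    if probe is None:
        logger.error("No probe registered under %r", args.probe)
        raise SystemExit(1)
    probe()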
-------------------------------------------------------------------------------- /prober/probes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/prober/probes/__init__.py -------------------------------------------------------------------------------- /prober/probes/logging_config.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | 4 | def initialize_logger(name=__name__, level=logging.INFO): 5 | """ 6 | Initializes and configures a logger. 7 | 8 | Args: 9 | name (str): The name of the logger. 10 | level (int): The logging level (e.g., logging.INFO, logging.DEBUG). 11 | 12 | Returns: 13 | logging.Logger: Configured logger instance. 14 | """ 15 | logger = logging.getLogger(name) 16 | logger.setLevel(level) 17 | 18 | # Create a console handler 19 | handler = logging.StreamHandler() 20 | handler.setLevel(level) 21 | 22 | # Create a formatter and set it for the handler 23 | formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") 24 | handler.setFormatter(formatter) 25 | 26 | # Add the handler to the logger 27 | if not logger.handlers: # Avoid duplicate handlers 28 | logger.addHandler(handler) 29 | 30 | return logger 31 | -------------------------------------------------------------------------------- /prober/probes/registry.py: -------------------------------------------------------------------------------- 1 | PROBES_FUNCTIONS = {} 2 | 3 | 4 | def prober_function(func): 5 | """ 6 | Register a function in the PROBES_FUNCTIONS dictionary. 7 | The key is the function name, and the value is the function itself. 8 | """ 9 | PROBES_FUNCTIONS[func.__name__] = func 10 | return func 11 | -------------------------------------------------------------------------------- /prober/probes/testing_matrix.json: -------------------------------------------------------------------------------- 1 | { 2 | "snowflake-connector-python": [ 3 | { 4 | "version": "3.15.0", 5 | "python_version": ["3.8", "3.9", "3.10"] 6 | }, 7 | { 8 | "version": "3.14.1", 9 | "python_version": ["3.9", "3.10", "3.11"], 10 | "features": ["login", "fetch", "get"] 11 | }, 12 | { 13 | "version": "3.14.0", 14 | "python_version": ["3.10", "3.11", "3.12"] 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /prober/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | setup( 4 | name="snowflake_prober", 5 | version="1.0.0", 6 | packages=find_packages(), 7 | install_requires=[ 8 | "snowflake-connector-python", 9 | "requests", 10 | ], 11 | entry_points={ 12 | "console_scripts": [ 13 | "prober=probes.main:main", 14 | ], 15 | }, 16 | ) 17 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | # The minimum setuptools version is specific to the PEP 517 backend, 4 | # and may be stricter than the version required in `setup.py` 5 | "setuptools>=40.6.0", 6 | "wheel", 7 | "cython", 8 | ] 9 | 10 | [tool.cibuildwheel] 11 | test-skip = "*" 12 | manylinux-x86_64-image = "manylinux2014" 13 | environment = {AUDITWHEEL_PLAT="manylinux2014_$(uname -m)"} 14 | build-verbosity = 1 15 | 16 | [tool.cibuildwheel.linux] 17 | 
archs = ["x86_64", "aarch64"] 18 | 19 | [tool.cibuildwheel.macos] 20 | archs = ["x86_64", "arm64"] 21 | # Don't repair macOS wheels 22 | repair-wheel-command = "" 23 | 24 | [tool.cibuildwheel.windows] 25 | archs = ["AMD64"] 26 | -------------------------------------------------------------------------------- /samples/README.md: -------------------------------------------------------------------------------- 1 | # Snowflake Connector for Python Samples 2 | 3 | These are code samples that show common scenario operations with the Snowflake Connector for Python library. 4 | 5 | - auth_by_key_pair_from_file.py - Example to implement a custom key pair authentication plugin 6 | which reads private key from a file 7 | -------------------------------------------------------------------------------- /src/snowflake/connector/_sql_util.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | 5 | from .constants import FileTransferType 6 | 7 | COMMENT_START_SQL_RE = re.compile( 8 | r""" 9 | ^\s*(?: 10 | /\*[\w\W]*?\*/ 11 | )""", 12 | re.VERBOSE, 13 | ) 14 | 15 | PUT_SQL_RE = re.compile(r"^\s*put", flags=re.IGNORECASE) 16 | GET_SQL_RE = re.compile(r"^\s*get", flags=re.IGNORECASE) 17 | 18 | 19 | def remove_starting_comments(sql: str) -> str: 20 | """Remove all comments from the start of a SQL statement.""" 21 | commentless_sql = sql 22 | while True: 23 | start_comment = COMMENT_START_SQL_RE.match(commentless_sql) 24 | if start_comment is None: 25 | break 26 | commentless_sql = commentless_sql[start_comment.end() :] 27 | return commentless_sql 28 | 29 | 30 | def get_file_transfer_type(sql: str) -> FileTransferType | None: 31 | """Decide whether a SQL is a file transfer and return its type. 32 | 33 | None is returned if the SQL isn't a file transfer so that this function can be 34 | used in an if-statement. 
35 | """ 36 | commentless_sql = remove_starting_comments(sql) 37 | if PUT_SQL_RE.match(commentless_sql): 38 | return FileTransferType.PUT 39 | elif GET_SQL_RE.match(commentless_sql): 40 | return FileTransferType.GET 41 | 42 | 43 | def is_put_statement(sql: str) -> bool: 44 | return get_file_transfer_type(sql) == FileTransferType.PUT 45 | 46 | 47 | def is_get_statement(sql: str) -> bool: 48 | return get_file_transfer_type(sql) == FileTransferType.GET 49 | -------------------------------------------------------------------------------- /src/snowflake/connector/auth/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from ._auth import Auth, get_public_key_fingerprint, get_token_from_private_key 4 | from .by_plugin import AuthByPlugin, AuthType 5 | from .default import AuthByDefault 6 | from .idtoken import AuthByIdToken 7 | from .keypair import AuthByKeyPair 8 | from .no_auth import AuthNoAuth 9 | from .oauth import AuthByOAuth 10 | from .oauth_code import AuthByOauthCode 11 | from .oauth_credentials import AuthByOauthCredentials 12 | from .okta import AuthByOkta 13 | from .pat import AuthByPAT 14 | from .usrpwdmfa import AuthByUsrPwdMfa 15 | from .webbrowser import AuthByWebBrowser 16 | from .workload_identity import AuthByWorkloadIdentity 17 | 18 | FIRST_PARTY_AUTHENTICATORS = frozenset( 19 | ( 20 | AuthByDefault, 21 | AuthByKeyPair, 22 | AuthByOAuth, 23 | AuthByOauthCode, 24 | AuthByOauthCredentials, 25 | AuthByOkta, 26 | AuthByUsrPwdMfa, 27 | AuthByWebBrowser, 28 | AuthByIdToken, 29 | AuthByPAT, 30 | AuthByWorkloadIdentity, 31 | AuthNoAuth, 32 | ) 33 | ) 34 | 35 | __all__ = [ 36 | "AuthByPlugin", 37 | "AuthByDefault", 38 | "AuthByKeyPair", 39 | "AuthByPAT", 40 | "AuthByOAuth", 41 | "AuthByOauthCode", 42 | "AuthByOauthCredentials", 43 | "AuthByOkta", 44 | "AuthByUsrPwdMfa", 45 | "AuthByWebBrowser", 46 | "AuthByWorkloadIdentity", 47 | "AuthNoAuth", 48 | "Auth", 49 | "AuthType", 50 | "FIRST_PARTY_AUTHENTICATORS", 51 | "get_public_key_fingerprint", 52 | "get_token_from_private_key", 53 | ] 54 | -------------------------------------------------------------------------------- /src/snowflake/connector/auth/default.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from typing import Any 5 | 6 | from .by_plugin import AuthByPlugin, AuthType 7 | 8 | 9 | class AuthByDefault(AuthByPlugin): 10 | """Default username and password authenticator.""" 11 | 12 | @property 13 | def type_(self) -> AuthType: 14 | return AuthType.DEFAULT 15 | 16 | @property 17 | def assertion_content(self) -> str: 18 | return "*********" 19 | 20 | def __init__(self, password: str, **kwargs) -> None: 21 | """Initializes an instance with a password.""" 22 | super().__init__(**kwargs) 23 | self._password: str | None = password 24 | 25 | def reset_secrets(self) -> None: 26 | self._password = None 27 | 28 | def prepare(self, **kwargs: Any) -> None: 29 | pass 30 | 31 | def reauthenticate(self, **kwargs: Any) -> dict[str, bool]: 32 | return {"success": False} 33 | 34 | def update_body(self, body: dict[Any, Any]) -> None: 35 | """Sets the password if available.""" 36 | body["data"]["PASSWORD"] = self._password 37 | -------------------------------------------------------------------------------- /src/snowflake/connector/auth/no_auth.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 
| from __future__ import annotations 3 | 4 | from typing import Any 5 | 6 | from .by_plugin import AuthByPlugin, AuthType 7 | 8 | 9 | class AuthNoAuth(AuthByPlugin): 10 | """No-auth Authentication. 11 | 12 | It is a dummy auth that requires no extra connection establishment. 13 | """ 14 | 15 | @property 16 | def type_(self) -> AuthType: 17 | return AuthType.NO_AUTH 18 | 19 | @property 20 | def assertion_content(self) -> str | None: 21 | return None 22 | 23 | def __init__(self) -> None: 24 | super().__init__() 25 | 26 | def reset_secrets(self) -> None: 27 | pass 28 | 29 | def prepare( 30 | self, 31 | **kwargs: Any, 32 | ) -> None: 33 | pass 34 | 35 | def reauthenticate(self, **kwargs: Any) -> dict[str, bool]: 36 | return {"success": True} 37 | 38 | def update_body(self, body: dict[Any, Any]) -> None: 39 | pass 40 | -------------------------------------------------------------------------------- /src/snowflake/connector/auth/oauth.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from typing import Any 5 | 6 | from ..network import OAUTH_AUTHENTICATOR 7 | from .by_plugin import AuthByPlugin, AuthType 8 | 9 | 10 | class AuthByOAuth(AuthByPlugin): 11 | """OAuth Based Authentication. 12 | 13 | Works by accepting an OAuth token and using that to authenticate. 14 | """ 15 | 16 | @property 17 | def type_(self) -> AuthType: 18 | return AuthType.OAUTH 19 | 20 | @property 21 | def assertion_content(self) -> str | None: 22 | """Returns the token.""" 23 | return self._oauth_token 24 | 25 | def __init__(self, oauth_token: str, **kwargs) -> None: 26 | """Initializes an instance with an OAuth Token.""" 27 | super().__init__(**kwargs) 28 | self._oauth_token: str | None = oauth_token 29 | 30 | def reset_secrets(self) -> None: 31 | self._oauth_token = None 32 | 33 | def prepare( 34 | self, 35 | **kwargs: Any, 36 | ) -> None: 37 | """Nothing to do here, token should be obtained outside the driver.""" 38 | pass 39 | 40 | def reauthenticate(self, **kwargs: Any) -> dict[str, bool]: 41 | return {"success": False} 42 | 43 | def update_body(self, body: dict[Any, Any]) -> None: 44 | """Update some information required by OAuth. 45 | 46 | OAuth needs the authenticator and token attributes set, as well as loginname, which is set already in auth.py. 
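        Illustrative effect (a sketch; a real login request body carries many
        more fields than shown here)::

            body = {"data": {}}
            AuthByOAuth("my-oauth-token").update_body(body)
            # body["data"]["AUTHENTICATOR"] is now OAUTH_AUTHENTICATOR and
            # body["data"]["TOKEN"] is "my-oauth-token"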
47 | """ 48 | body["data"]["AUTHENTICATOR"] = OAUTH_AUTHENTICATOR 49 | body["data"]["TOKEN"] = self._oauth_token 50 | -------------------------------------------------------------------------------- /src/snowflake/connector/auth/pat.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import typing 4 | 5 | from snowflake.connector.network import PROGRAMMATIC_ACCESS_TOKEN 6 | 7 | from .by_plugin import AuthByPlugin, AuthType 8 | 9 | 10 | class AuthByPAT(AuthByPlugin): 11 | 12 | def __init__(self, pat_token: str, **kwargs) -> None: 13 | super().__init__(**kwargs) 14 | self._pat_token: str | None = pat_token 15 | 16 | def type_(self) -> AuthType: 17 | return AuthType.PAT 18 | 19 | def reset_secrets(self) -> None: 20 | self._pat_token = None 21 | 22 | def update_body(self, body: dict[typing.Any, typing.Any]) -> None: 23 | body["data"]["AUTHENTICATOR"] = PROGRAMMATIC_ACCESS_TOKEN 24 | body["data"]["TOKEN"] = self._pat_token 25 | 26 | def prepare( 27 | self, 28 | **kwargs: typing.Any, 29 | ) -> None: 30 | """Nothing to do here, token should be obtained outside the driver.""" 31 | pass 32 | 33 | def reauthenticate(self, **kwargs: typing.Any) -> dict[str, bool]: 34 | return {"success": False} 35 | 36 | @property 37 | def assertion_content(self) -> str | None: 38 | """Returns the token.""" 39 | return self._pat_token 40 | -------------------------------------------------------------------------------- /src/snowflake/connector/converter_null.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from typing import Any 5 | 6 | from .converter import SnowflakeConverter 7 | 8 | 9 | class SnowflakeNoConverterToPython(SnowflakeConverter): 10 | def __init__(self, **kwargs) -> None: 11 | super().__init__(**kwargs) 12 | 13 | def to_python_method(self, type_name: str, column: dict[str, Any]) -> None: 14 | return None 15 | -------------------------------------------------------------------------------- /src/snowflake/connector/dbapi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """This module implements some constructors and singletons as required by the DB API v2.0 (PEP-249).""" 3 | 4 | from __future__ import annotations 5 | 6 | import datetime 7 | import time 8 | 9 | from .constants import ( 10 | get_binary_types, 11 | get_number_types, 12 | get_string_types, 13 | get_timestamp_types, 14 | ) 15 | 16 | 17 | class _DBAPITypeObject: 18 | def __init__(self, *values) -> None: 19 | self.values = values 20 | 21 | def __cmp__(self, other): 22 | if other in self.values: 23 | return 0 24 | if other < self.values: 25 | return 1 26 | else: 27 | return -1 28 | 29 | 30 | Date = datetime.date 31 | Time = datetime.time 32 | Timestamp = datetime.datetime 33 | 34 | 35 | def DateFromTicks(ticks: float) -> datetime.date: 36 | return Date(*time.localtime(ticks)[:3]) 37 | 38 | 39 | def TimeFromTicks(ticks: float) -> datetime.time: 40 | return Time(*time.localtime(ticks)[3:6]) 41 | 42 | 43 | def TimestampFromTicks(ticks: float) -> datetime.datetime: 44 | return Timestamp(*time.localtime(ticks)[:6]) 45 | 46 | 47 | Binary = bytes 48 | 49 | STRING = _DBAPITypeObject(get_string_types()) 50 | BINARY = _DBAPITypeObject(get_binary_types()) 51 | NUMBER = _DBAPITypeObject(get_number_types()) 52 | DATETIME = _DBAPITypeObject(get_timestamp_types()) 53 | ROWID = _DBAPITypeObject() 54 | 
-------------------------------------------------------------------------------- /src/snowflake/connector/description.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Various constants.""" 3 | 4 | from __future__ import annotations 5 | 6 | import platform 7 | import sys 8 | 9 | from .version import VERSION 10 | 11 | SNOWFLAKE_CONNECTOR_VERSION = ".".join(str(v) for v in VERSION[0:3]) 12 | PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:3]) 13 | OPERATING_SYSTEM = platform.system() 14 | PLATFORM = platform.platform() 15 | IMPLEMENTATION = platform.python_implementation() 16 | COMPILER = platform.python_compiler() 17 | 18 | CLIENT_NAME = "PythonConnector" # don't change! 19 | CLIENT_VERSION = ".".join([str(v) for v in VERSION[:3]]) 20 | -------------------------------------------------------------------------------- /src/snowflake/connector/externals_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/externals_utils/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/externals_utils/externals_setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from snowflake.connector.logging_utils.filters import ( 4 | SecretMaskingFilter, 5 | add_filter_to_logger_and_children, 6 | ) 7 | 8 | MODULES_TO_MASK_LOGS_NAMES = [ 9 | "snowflake.connector.vendored.urllib3", 10 | "botocore", 11 | "boto3", 12 | ] 13 | # TODO: after migration to the external urllib3 from the vendored one (SNOW-2041970), 14 | # we should change filters here immediately to the below module's logger: 15 | # MODULES_TO_MASK_LOGS_NAMES = [ "urllib3", ... ] 16 | 17 | 18 | def add_filters_to_external_loggers(): 19 | for module_name in MODULES_TO_MASK_LOGS_NAMES: 20 | add_filter_to_logger_and_children(module_name, SecretMaskingFilter()) 21 | 22 | 23 | def setup_external_libraries(): 24 | """ 25 | Assures proper setup and injections before any external libraries are used. 
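    Illustrative effect (a sketch, assuming the module names listed in
    MODULES_TO_MASK_LOGS_NAMES above)::

        setup_external_libraries()
        # logging.getLogger("botocore") and its child loggers now carry a
        # SecretMaskingFilter, so secrets are masked before records are emitted.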
26 | """ 27 | add_filters_to_external_loggers() 28 | -------------------------------------------------------------------------------- /src/snowflake/connector/feature.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Feature flags 3 | 4 | feature_use_pyopenssl = True # use pyopenssl API or openssl command 5 | -------------------------------------------------------------------------------- /src/snowflake/connector/logging_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/logging_utils/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/ArrayConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_ARRAYCONVERTER_HPP 2 | #define PC_ARRAYCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | #include "nanoarrow.hpp" 10 | 11 | namespace sf { 12 | 13 | class ArrayConverter : public IColumnConverter { 14 | public: 15 | explicit ArrayConverter(ArrowSchemaView* schemaView, ArrowArrayView* array, 16 | PyObject* context, bool useNumpy); 17 | 18 | PyObject* toPyObject(int64_t rowIndex) const override; 19 | 20 | private: 21 | void generateError(const std::string& msg) const; 22 | 23 | ArrowArrayView* m_array; 24 | std::shared_ptr m_item_converter; 25 | static Logger* logger; 26 | }; 27 | 28 | } // namespace sf 29 | #endif // PC_ARRAYCONVERTER_HPP 30 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/BinaryConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "BinaryConverter.hpp" 2 | 3 | #include 4 | 5 | namespace sf { 6 | Logger* BinaryConverter::logger = 7 | new Logger("snowflake.connector.BinaryConverter"); 8 | 9 | BinaryConverter::BinaryConverter(ArrowArrayView* array) : m_array(array) {} 10 | 11 | PyObject* BinaryConverter::toPyObject(int64_t rowIndex) const { 12 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 13 | Py_RETURN_NONE; 14 | } 15 | ArrowStringView stringView = ArrowArrayViewGetStringUnsafe(m_array, rowIndex); 16 | return PyByteArray_FromStringAndSize(stringView.data, stringView.size_bytes); 17 | } 18 | 19 | } // namespace sf 20 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/BinaryConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_BINARYCONVERTER_HPP 2 | #define PC_BINARYCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | 10 | namespace sf { 11 | 12 | class BinaryConverter : public IColumnConverter { 13 | public: 14 | explicit BinaryConverter(ArrowArrayView* array); 15 | 16 | PyObject* toPyObject(int64_t rowIndex) const override; 17 | 18 | private: 19 | ArrowArrayView* m_array; 20 | 21 | static Logger* logger; 22 | }; 23 | 24 | } // namespace sf 25 | 26 | #endif // PC_BINARYCONVERTER_HPP 27 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/BooleanConverter.cpp: 
-------------------------------------------------------------------------------- 1 | #include "BooleanConverter.hpp" 2 | 3 | #include 4 | 5 | namespace sf { 6 | 7 | BooleanConverter::BooleanConverter(ArrowArrayView* array) : m_array(array) {} 8 | 9 | PyObject* BooleanConverter::toPyObject(int64_t rowIndex) const { 10 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 11 | Py_RETURN_NONE; 12 | } 13 | 14 | if (ArrowArrayViewGetIntUnsafe(m_array, rowIndex)) { 15 | Py_RETURN_TRUE; 16 | } else { 17 | Py_RETURN_FALSE; 18 | } 19 | } 20 | 21 | } // namespace sf 22 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/BooleanConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_BOOLEANCONVERTER_HPP 2 | #define PC_BOOLEANCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "nanoarrow.h" 8 | 9 | namespace sf { 10 | 11 | class BooleanConverter : public IColumnConverter { 12 | public: 13 | explicit BooleanConverter(ArrowArrayView* array); 14 | 15 | PyObject* toPyObject(int64_t rowIndex) const override; 16 | 17 | private: 18 | ArrowArrayView* m_array; 19 | }; 20 | 21 | } // namespace sf 22 | 23 | #endif // PC_BOOLEANCONVERTER_HPP 24 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/DateConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "DateConverter.hpp" 2 | 3 | #include 4 | 5 | #include "Python/Helpers.hpp" 6 | 7 | namespace sf { 8 | Logger* DateConverter::logger = new Logger("snowflake.connector.DateConverter"); 9 | 10 | py::UniqueRef& DateConverter::initPyDatetimeDate() { 11 | static py::UniqueRef pyDatetimeDate; 12 | if (pyDatetimeDate.empty()) { 13 | py::UniqueRef pyDatetimeModule; 14 | py::importPythonModule("datetime", pyDatetimeModule); 15 | py::importFromModule(pyDatetimeModule, "date", pyDatetimeDate); 16 | Py_XINCREF(pyDatetimeDate.get()); 17 | } 18 | return pyDatetimeDate; 19 | } 20 | 21 | DateConverter::DateConverter(ArrowArrayView* array) 22 | : m_array(array), m_pyDatetimeDate(initPyDatetimeDate()) {} 23 | 24 | PyObject* DateConverter::toPyObject(int64_t rowIndex) const { 25 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 26 | Py_RETURN_NONE; 27 | } 28 | 29 | int64_t deltaDays = ArrowArrayViewGetIntUnsafe(m_array, rowIndex); 30 | return PyObject_CallMethod(m_pyDatetimeDate.get(), "fromordinal", "i", 31 | epochDay + deltaDays); 32 | } 33 | 34 | NumpyDateConverter::NumpyDateConverter(ArrowArrayView* array, PyObject* context) 35 | : m_array(array), m_context(context) {} 36 | 37 | PyObject* NumpyDateConverter::toPyObject(int64_t rowIndex) const { 38 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 39 | Py_RETURN_NONE; 40 | } 41 | 42 | int64_t deltaDays = ArrowArrayViewGetIntUnsafe(m_array, rowIndex); 43 | return PyObject_CallMethod(m_context, "DATE_to_numpy_datetime64", "i", 44 | deltaDays); 45 | } 46 | 47 | } // namespace sf 48 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/DateConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_DATECONVERTER_HPP 2 | #define PC_DATECONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "Python/Common.hpp" 8 | #include "logging.hpp" 9 | #include "nanoarrow.h" 10 | 11 | 
namespace sf { 12 | 13 | class DateConverter : public IColumnConverter { 14 | public: 15 | explicit DateConverter(ArrowArrayView* array); 16 | 17 | PyObject* toPyObject(int64_t rowIndex) const override; 18 | 19 | private: 20 | static py::UniqueRef& initPyDatetimeDate(); 21 | 22 | ArrowArrayView* m_array; 23 | 24 | /** from Python Ordinal to 1970-01-01 */ 25 | static constexpr int epochDay = 719163; 26 | 27 | static Logger* logger; 28 | 29 | py::UniqueRef& m_pyDatetimeDate; 30 | }; 31 | 32 | class NumpyDateConverter : public IColumnConverter { 33 | public: 34 | explicit NumpyDateConverter(ArrowArrayView* array, PyObject* context); 35 | 36 | PyObject* toPyObject(int64_t rowIndex) const override; 37 | 38 | private: 39 | ArrowArrayView* m_array; 40 | 41 | PyObject* m_context; 42 | }; 43 | 44 | } // namespace sf 45 | 46 | #endif // PC_DATECONVERTER_HPP 47 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/DecFloatConverter.hpp: -------------------------------------------------------------------------------- 1 | 2 | #ifndef PC_DECFLOATCONVERTER_HPP 3 | #define PC_DECFLOATCONVERTER_HPP 4 | 5 | #include 6 | 7 | #include "IColumnConverter.hpp" 8 | #include "logging.hpp" 9 | #include "nanoarrow.h" 10 | 11 | namespace sf { 12 | 13 | class DecFloatConverter : public IColumnConverter { 14 | public: 15 | const static std::string FIELD_NAME_EXPONENT; 16 | const static std::string FIELD_NAME_SIGNIFICAND; 17 | 18 | explicit DecFloatConverter(ArrowArrayView& array, ArrowSchemaView& schema, 19 | PyObject& context, bool useNumpy); 20 | 21 | PyObject* toPyObject(int64_t rowIndex) const override; 22 | 23 | private: 24 | PyObject& m_context; 25 | ArrowArrayView& m_array; 26 | ArrowArrayView* m_exponent; 27 | ArrowArrayView* m_significand; 28 | bool m_useNumpy; 29 | 30 | static Logger* logger; 31 | }; 32 | 33 | } // namespace sf 34 | 35 | #endif // PC_DECFLOATCONVERTER_HPP 36 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/FixedSizeListConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_FIXEDSIZELISTCONVERTER_HPP 2 | #define PC_FIXEDSIZELISTCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | #include "nanoarrow.hpp" 10 | 11 | namespace sf { 12 | 13 | class FixedSizeListConverter : public IColumnConverter { 14 | public: 15 | explicit FixedSizeListConverter(ArrowArrayView* array); 16 | PyObject* toPyObject(int64_t rowIndex) const override; 17 | 18 | private: 19 | void generateError(const std::string& msg) const; 20 | 21 | ArrowArrayView* m_array; 22 | 23 | static Logger* logger; 24 | }; 25 | 26 | } // namespace sf 27 | 28 | #endif // PC_FIXEDSIZELISTCONVERTER_HPP 29 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/FloatConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "FloatConverter.hpp" 2 | 3 | #include 4 | 5 | namespace sf { 6 | 7 | /** snowflake float is 64-precision, which refers to double here */ 8 | FloatConverter::FloatConverter(ArrowArrayView* array) : m_array(array) {} 9 | 10 | PyObject* FloatConverter::toPyObject(int64_t rowIndex) const { 11 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 12 | Py_RETURN_NONE; 13 | } 14 | return 
PyFloat_FromDouble(ArrowArrayViewGetDoubleUnsafe(m_array, rowIndex)); 15 | } 16 | 17 | NumpyFloat64Converter::NumpyFloat64Converter(ArrowArrayView* array, 18 | PyObject* context) 19 | : m_array(array), m_context(context) {} 20 | 21 | PyObject* NumpyFloat64Converter::toPyObject(int64_t rowIndex) const { 22 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 23 | Py_RETURN_NONE; 24 | } 25 | 26 | double val = ArrowArrayViewGetDoubleUnsafe(m_array, rowIndex); 27 | return PyObject_CallMethod(m_context, "REAL_to_numpy_float64", "d", val); 28 | } 29 | 30 | } // namespace sf 31 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/FloatConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_FLOATCONVERTER_HPP 2 | #define PC_FLOATCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "nanoarrow.h" 8 | 9 | namespace sf { 10 | 11 | class FloatConverter : public IColumnConverter { 12 | public: 13 | explicit FloatConverter(ArrowArrayView* array); 14 | 15 | PyObject* toPyObject(int64_t rowIndex) const override; 16 | 17 | private: 18 | ArrowArrayView* m_array; 19 | }; 20 | 21 | class NumpyFloat64Converter : public IColumnConverter { 22 | public: 23 | explicit NumpyFloat64Converter(ArrowArrayView* array, PyObject* context); 24 | 25 | PyObject* toPyObject(int64_t rowIndex) const override; 26 | 27 | private: 28 | ArrowArrayView* m_array; 29 | 30 | PyObject* m_context; 31 | }; 32 | 33 | } // namespace sf 34 | 35 | #endif // PC_FLOATCONVERTER_HPP 36 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/IColumnConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_ICOLUMNCONVERTER_HPP 2 | #define PC_ICOLUMNCONVERTER_HPP 3 | 4 | #include "Python/Common.hpp" 5 | 6 | namespace sf { 7 | 8 | class IColumnConverter { 9 | public: 10 | IColumnConverter() = default; 11 | virtual ~IColumnConverter() = default; 12 | // The caller is responsible for calling DECREF on the returned pointer 13 | virtual PyObject* toPyObject(int64_t rowIndex) const = 0; 14 | }; 15 | } // namespace sf 16 | 17 | #endif // PC_ICOLUMNCONVERTER_HPP 18 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/IntConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "IntConverter.hpp" 2 | 3 | namespace sf { 4 | /** this file is here for future use and if this is useless at the end, it will 5 | * be removed */ 6 | 7 | PyObject* IntConverter::toPyObject(int64_t rowIndex) const { 8 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 9 | Py_RETURN_NONE; 10 | } 11 | int64_t val = ArrowArrayViewGetIntUnsafe(m_array, rowIndex); 12 | return pyLongForward(val); 13 | } 14 | 15 | PyObject* NumpyIntConverter::toPyObject(int64_t rowIndex) const { 16 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 17 | Py_RETURN_NONE; 18 | } 19 | int64_t val = ArrowArrayViewGetIntUnsafe(m_array, rowIndex); 20 | return PyObject_CallMethod(m_context, "FIXED_to_numpy_int64", "L", val); 21 | } 22 | 23 | } // namespace sf 24 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/IntConverter.hpp: -------------------------------------------------------------------------------- 1 | 
#ifndef PC_INTCONVERTER_HPP 2 | #define PC_INTCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "nanoarrow.h" 8 | #include "nanoarrow.hpp" 9 | 10 | namespace sf { 11 | 12 | class IntConverter : public IColumnConverter { 13 | public: 14 | explicit IntConverter(ArrowArrayView* array) : m_array(array) {} 15 | 16 | PyObject* pyLongForward(int64_t value) const { 17 | return PyLong_FromLongLong(value); 18 | } 19 | 20 | PyObject* pyLongForward(int32_t value) const { 21 | return PyLong_FromLong(value); 22 | } 23 | 24 | PyObject* toPyObject(int64_t rowIndex) const override; 25 | 26 | private: 27 | ArrowArrayView* m_array; 28 | }; 29 | 30 | class NumpyIntConverter : public IColumnConverter { 31 | public: 32 | explicit NumpyIntConverter(ArrowArrayView* array, PyObject* context) 33 | : m_array(array), m_context(context) {} 34 | 35 | PyObject* toPyObject(int64_t rowIndex) const override; 36 | 37 | private: 38 | ArrowArrayView* m_array; 39 | 40 | PyObject* m_context; 41 | }; 42 | 43 | } // namespace sf 44 | 45 | #endif // PC_INTCONVERTER_HPP 46 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/IntervalConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_INTERVALCONVERTER_HPP 2 | #define PC_INTERVALCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "nanoarrow.h" 8 | #include "nanoarrow.hpp" 9 | 10 | namespace sf { 11 | 12 | class IntervalYearMonthConverter : public IColumnConverter { 13 | public: 14 | explicit IntervalYearMonthConverter(ArrowArrayView* array, PyObject* context, 15 | bool useNumpy); 16 | virtual ~IntervalYearMonthConverter() = default; 17 | 18 | PyObject* toPyObject(int64_t rowIndex) const override; 19 | 20 | private: 21 | ArrowArrayView* m_array; 22 | PyObject* m_context; 23 | bool m_useNumpy; 24 | }; 25 | 26 | class IntervalDayTimeConverterInt : public IColumnConverter { 27 | public: 28 | explicit IntervalDayTimeConverterInt(ArrowArrayView* array, PyObject* context, 29 | bool useNumpy); 30 | virtual ~IntervalDayTimeConverterInt() = default; 31 | 32 | PyObject* toPyObject(int64_t rowIndex) const override; 33 | 34 | private: 35 | ArrowArrayView* m_array; 36 | PyObject* m_context; 37 | const char* m_method; 38 | }; 39 | 40 | class IntervalDayTimeConverterDecimal : public IColumnConverter { 41 | public: 42 | explicit IntervalDayTimeConverterDecimal(ArrowArrayView* array, 43 | PyObject* context, bool useNumpy); 44 | virtual ~IntervalDayTimeConverterDecimal() = default; 45 | 46 | PyObject* toPyObject(int64_t rowIndex) const override; 47 | 48 | private: 49 | ArrowArrayView* m_array; 50 | PyObject* m_context; 51 | const char* m_method; 52 | }; 53 | 54 | } // namespace sf 55 | 56 | #endif // PC_INTERVALCONVERTER_HPP 57 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/MapConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_MAPCONVERTER_HPP 2 | #define PC_MAPCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | #include "nanoarrow.hpp" 10 | 11 | namespace sf { 12 | 13 | class MapConverter : public IColumnConverter { 14 | public: 15 | explicit MapConverter(ArrowSchemaView* schemaView, ArrowArrayView* array, 16 | PyObject* context, bool useNumpy); 17 | 18 | PyObject* 
toPyObject(int64_t rowIndex) const override; 19 | 20 | private: 21 | void generateError(const std::string& msg) const; 22 | 23 | ArrowArrayView* m_array; 24 | std::shared_ptr<IColumnConverter> m_key_converter; 25 | std::shared_ptr<IColumnConverter> m_value_converter; 26 | static Logger* logger; 27 | }; 28 | 29 | } // namespace sf 30 | #endif // PC_MAPCONVERTER_HPP 31 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/ObjectConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "ObjectConverter.hpp" 2 | 3 | #include <memory> 4 | 5 | #include "CArrowChunkIterator.hpp" 6 | #include "CArrowIterator.hpp" 7 | #include "SnowflakeType.hpp" 8 | 9 | namespace sf { 10 | Logger* ObjectConverter::logger = 11 | new Logger("snowflake.connector.ObjectConverter"); 12 | 13 | ObjectConverter::ObjectConverter(ArrowSchemaView* schemaView, 14 | ArrowArrayView* array, PyObject* context, 15 | bool useNumpy) { 16 | m_array = array; 17 | m_converters.clear(); 18 | m_property_names.clear(); 19 | m_propertyCount = schemaView->schema->n_children; 20 | 21 | for (int i = 0; i < schemaView->schema->n_children; i++) { 22 | ArrowSchema* property_schema = schemaView->schema->children[i]; 23 | 24 | m_property_names.push_back(property_schema->name); 25 | 26 | ArrowArrayView* child_array = array->children[i]; 27 | 28 | m_converters.push_back(getConverterFromSchema(property_schema, child_array, 29 | context, useNumpy, logger)); 30 | } 31 | } 32 | 33 | PyObject* ObjectConverter::toPyObject(int64_t rowIndex) const { 34 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 35 | Py_RETURN_NONE; 36 | } 37 | 38 | PyObject* dict = PyDict_New(); 39 | for (int i = 0; i < m_propertyCount; i++) { 40 | PyObject* value = m_converters[i]->toPyObject(rowIndex); 41 | PyDict_SetItemString(dict, m_property_names[i], value); 42 | Py_XDECREF(value); // PyDict_SetItemString does not steal this reference 43 | } 44 | return dict; 45 | } 46 | 47 | } // namespace sf 48 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/ObjectConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_OBJECTCONVERTER_HPP 2 | #define PC_OBJECTCONVERTER_HPP 3 | 4 | #include <memory> 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | #include "nanoarrow.hpp" 10 | 11 | namespace sf { 12 | 13 | class ObjectConverter : public IColumnConverter { 14 | public: 15 | explicit ObjectConverter(ArrowSchemaView* schemaView, ArrowArrayView* array, 16 | PyObject* context, bool useNumpy); 17 | PyObject* toPyObject(int64_t rowIndex) const override; 18 | 19 | private: 20 | static Logger* logger; 21 | ArrowArrayView* m_array; 22 | int m_propertyCount; 23 | std::vector<const char*> m_property_names; 24 | std::vector<std::shared_ptr<IColumnConverter>> m_converters; 25 | }; 26 | 27 | } // namespace sf 28 | 29 | #endif // PC_OBJECTCONVERTER_HPP 30 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/Python/Common.cpp: -------------------------------------------------------------------------------- 1 | #include "Common.hpp" 2 | 3 | namespace sf { 4 | 5 | namespace py { 6 | // this file will be deleted if it is not used in the future 7 | } 8 | } // namespace sf 9 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/Python/Helpers.cpp: -------------------------------------------------------------------------------- 1 |
#include "Helpers.hpp" 2 | 3 | #include 4 | 5 | #include "Common.hpp" 6 | 7 | namespace sf { 8 | 9 | namespace py { 10 | 11 | using Logger = ::sf::Logger; 12 | 13 | void importPythonModule(const std::string& moduleName, UniqueRef& ref) { 14 | PyObject* module = PyImport_ImportModule(moduleName.c_str()); 15 | if (checkPyError()) { 16 | return; 17 | } 18 | ref.reset(module); 19 | return; 20 | } 21 | 22 | void importPythonModule(const std::string& moduleName, UniqueRef& ref, 23 | Logger& logger) { 24 | PyObject* module = PyImport_ImportModule(moduleName.c_str()); 25 | if (checkPyError()) { 26 | logger.error(__FILE__, __func__, __LINE__, 27 | "import python module '%s' failed", moduleName.c_str()); 28 | return; 29 | } 30 | ref.reset(module); 31 | return; 32 | } 33 | 34 | void importFromModule(const UniqueRef& moduleRef, const std::string& name, 35 | UniqueRef& ref) { 36 | PyObject* attr = PyObject_GetAttrString(moduleRef.get(), name.c_str()); 37 | if (checkPyError()) { 38 | return; 39 | } 40 | ref.reset(attr); 41 | return; 42 | } 43 | 44 | void importFromModule(const UniqueRef& moduleRef, const std::string& name, 45 | UniqueRef& ref, Logger& logger) { 46 | PyObject* attr = PyObject_GetAttrString(moduleRef.get(), name.c_str()); 47 | if (checkPyError()) { 48 | logger.error(__FILE__, __func__, __LINE__, 49 | "import python attribute '%s' failed", name.c_str()); 50 | return; 51 | } 52 | ref.reset(attr); 53 | return; 54 | } 55 | 56 | } // namespace py 57 | } // namespace sf 58 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/Python/Helpers.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_PYTHON_HELPERS_HPP 2 | #define PC_PYTHON_HELPERS_HPP 3 | 4 | #include 5 | 6 | #include "logging.hpp" 7 | 8 | namespace sf { 9 | 10 | namespace py { 11 | 12 | class UniqueRef; 13 | 14 | using Logger = ::sf::Logger; 15 | 16 | /** 17 | * \brief: import a python module 18 | * \param moduleName: the name of the python module 19 | * \param ref: the RAII object to manage the PyObject 20 | * \return: 21 | */ 22 | void importPythonModule(const std::string& moduleName, UniqueRef& ref); 23 | 24 | void importPythonModule(const std::string& moduleName, UniqueRef& ref, 25 | const Logger& logger); 26 | 27 | void importFromModule(const UniqueRef& moduleRef, const std::string& name, 28 | UniqueRef& ref); 29 | 30 | void importFromModule(const UniqueRef& moduleRef, const std::string& name, 31 | UniqueRef& ref, const Logger& logger); 32 | 33 | } // namespace py 34 | } // namespace sf 35 | 36 | #endif // PC_PYTHON_HELPERS_HPP 37 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/SnowflakeType.cpp: -------------------------------------------------------------------------------- 1 | #include "SnowflakeType.hpp" 2 | 3 | namespace sf { 4 | 5 | std::unordered_map 6 | SnowflakeType::m_strEnumIndex = { 7 | {"ANY", SnowflakeType::Type::ANY}, 8 | {"ARRAY", SnowflakeType::Type::ARRAY}, 9 | {"BINARY", SnowflakeType::Type::BINARY}, 10 | {"BOOLEAN", SnowflakeType::Type::BOOLEAN}, 11 | {"CHAR", SnowflakeType::Type::CHAR}, 12 | {"DATE", SnowflakeType::Type::DATE}, 13 | {"DOUBLE PRECISION", SnowflakeType::Type::REAL}, 14 | {"DOUBLE", SnowflakeType::Type::REAL}, 15 | {"FIXED", SnowflakeType::Type::FIXED}, 16 | {"DECFLOAT", SnowflakeType::Type::DECFLOAT}, 17 | {"FLOAT", SnowflakeType::Type::REAL}, 18 | {"INTERVAL_YEAR_MONTH", 
SnowflakeType::Type::INTERVAL_YEAR_MONTH}, 19 | {"INTERVAL_DAY_TIME", SnowflakeType::Type::INTERVAL_DAY_TIME}, 20 | {"MAP", SnowflakeType::Type::MAP}, 21 | {"OBJECT", SnowflakeType::Type::OBJECT}, 22 | {"REAL", SnowflakeType::Type::REAL}, 23 | {"STRING", SnowflakeType::Type::TEXT}, 24 | {"TEXT", SnowflakeType::Type::TEXT}, 25 | {"TIME", SnowflakeType::Type::TIME}, 26 | {"TIMESTAMP", SnowflakeType::Type::TIMESTAMP}, 27 | {"TIMESTAMP_LTZ", SnowflakeType::Type::TIMESTAMP_LTZ}, 28 | {"TIMESTAMP_NTZ", SnowflakeType::Type::TIMESTAMP_NTZ}, 29 | {"TIMESTAMP_TZ", SnowflakeType::Type::TIMESTAMP_TZ}, 30 | {"VARCHAR", SnowflakeType::Type::TEXT}, 31 | {"VARIANT", SnowflakeType::Type::VARIANT}, 32 | {"VECTOR", SnowflakeType::Type::VECTOR}}; 33 | 34 | } // namespace sf 35 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/SnowflakeType.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_SNOWFLAKETYPE_HPP 2 | #define PC_SNOWFLAKETYPE_HPP 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | namespace sf { 10 | 11 | class SnowflakeType { 12 | public: 13 | enum class Type : uint8_t { 14 | ANY = 0, 15 | ARRAY = 1, 16 | BINARY = 2, 17 | BOOLEAN = 3, 18 | CHAR = 4, 19 | DATE = 5, 20 | FIXED = 6, 21 | OBJECT = 7, 22 | REAL = 8, 23 | TEXT = 9, 24 | TIME = 10, 25 | TIMESTAMP = 11, 26 | TIMESTAMP_LTZ = 12, 27 | TIMESTAMP_NTZ = 13, 28 | TIMESTAMP_TZ = 14, 29 | VARIANT = 15, 30 | VECTOR = 16, 31 | MAP = 17, 32 | DECFLOAT = 18, 33 | INTERVAL_YEAR_MONTH = 19, 34 | INTERVAL_DAY_TIME = 20, 35 | }; 36 | 37 | static SnowflakeType::Type snowflakeTypeFromString(std::string str) { 38 | std::transform(str.begin(), str.end(), str.begin(), ::toupper); 39 | return m_strEnumIndex.at(str); 40 | } 41 | 42 | private: 43 | static std::unordered_map m_strEnumIndex; 44 | }; 45 | 46 | } // namespace sf 47 | 48 | #endif // PC_SNOWFLAKETYPE_HPP 49 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/StringConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "StringConverter.hpp" 2 | 3 | #include 4 | 5 | namespace sf { 6 | Logger* StringConverter::logger = 7 | new Logger("snowflake.connector.StringConverter"); 8 | 9 | StringConverter::StringConverter(ArrowArrayView* array) : m_array(array) {} 10 | 11 | PyObject* StringConverter::toPyObject(int64_t rowIndex) const { 12 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 13 | Py_RETURN_NONE; 14 | } 15 | ArrowStringView stringView = ArrowArrayViewGetStringUnsafe(m_array, rowIndex); 16 | return PyUnicode_FromStringAndSize(stringView.data, stringView.size_bytes); 17 | } 18 | 19 | } // namespace sf 20 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/StringConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_STRINGCONVERTER_HPP 2 | #define PC_STRINGCONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "logging.hpp" 8 | #include "nanoarrow.h" 9 | #include "nanoarrow.hpp" 10 | 11 | namespace sf { 12 | 13 | class StringConverter : public IColumnConverter { 14 | public: 15 | explicit StringConverter(ArrowArrayView* array); 16 | PyObject* toPyObject(int64_t rowIndex) const override; 17 | 18 | private: 19 | ArrowArrayView* m_array; 20 | 21 | static Logger* 
logger; 22 | }; 23 | 24 | } // namespace sf 25 | 26 | #endif // PC_STRINGCONVERTER_HPP 27 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/TimeConverter.cpp: -------------------------------------------------------------------------------- 1 | #include "TimeConverter.hpp" 2 | 3 | namespace sf { 4 | 5 | TimeConverter::TimeConverter(ArrowArrayView* array, int32_t scale) 6 | : m_array(array), m_scale(scale) {} 7 | 8 | PyObject* TimeConverter::toPyObject(int64_t rowIndex) const { 9 | if (ArrowArrayViewIsNull(m_array, rowIndex)) { 10 | Py_RETURN_NONE; 11 | } 12 | 13 | int64_t seconds = ArrowArrayViewGetIntUnsafe(m_array, rowIndex); 14 | using namespace internal; 15 | py::PyUniqueLock lock; 16 | return PyObject_CallFunction(m_pyDatetimeTime().get(), "iiii", 17 | getHourFromSeconds(seconds, m_scale), 18 | getMinuteFromSeconds(seconds, m_scale), 19 | getSecondFromSeconds(seconds, m_scale), 20 | getMicrosecondFromSeconds(seconds, m_scale)); 21 | } 22 | 23 | py::UniqueRef& TimeConverter::m_pyDatetimeTime() { 24 | static py::UniqueRef pyDatetimeTime; 25 | if (pyDatetimeTime.empty()) { 26 | py::PyUniqueLock lock; 27 | py::UniqueRef pyDatetimeModule; 28 | py::importPythonModule("datetime", pyDatetimeModule); 29 | /** TODO : to check status here */ 30 | 31 | py::importFromModule(pyDatetimeModule, "time", pyDatetimeTime); 32 | } 33 | return pyDatetimeTime; 34 | } 35 | 36 | } // namespace sf 37 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/TimeConverter.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_TIMECONVERTER_HPP 2 | #define PC_TIMECONVERTER_HPP 3 | 4 | #include 5 | 6 | #include "IColumnConverter.hpp" 7 | #include "Python/Common.hpp" 8 | #include "Python/Helpers.hpp" 9 | #include "Util/time.hpp" 10 | #include "nanoarrow.h" 11 | 12 | namespace sf { 13 | 14 | class TimeConverter : public IColumnConverter { 15 | public: 16 | explicit TimeConverter(ArrowArrayView* array, int32_t scale); 17 | 18 | PyObject* toPyObject(int64_t rowIndex) const override; 19 | 20 | private: 21 | /** can be arrow::Int32Array and arrow::Int64Array */ 22 | ArrowArrayView* m_array; 23 | 24 | int32_t m_scale; 25 | 26 | static py::UniqueRef& m_pyDatetimeTime(); 27 | }; 28 | 29 | } // namespace sf 30 | 31 | #endif // PC_TIMECONVERTER_HPP 32 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/Util/macros.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_UTIL_MACROS_HPP 2 | #define PC_UTIL_MACROS_HPP 3 | 4 | /** the same macros as linux kernel's likely and unlikely. 
These hints can help a specific 5 | * compiler make better branch predictions. */ 6 | #if defined(__GNUC__) 7 | #define LIKELY(x) (__builtin_expect(!!(x), 1)) 8 | #define UNLIKELY(x) (__builtin_expect(!!(x), 0)) 9 | #else 10 | #define LIKELY(x) x 11 | #define UNLIKELY(x) x 12 | #endif // __GNUC__ 13 | 14 | #endif // PC_UTIL_MACROS_HPP 15 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/flatcc_assert.h: -------------------------------------------------------------------------------- 1 | #ifndef FLATCC_ASSERT_H 2 | #define FLATCC_ASSERT_H 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | /* 9 | * This assert abstraction is only used for the flatcc runtime library. 10 | * The flatcc compiler uses Posix assert routines regardless of how this 11 | * file is configured. 12 | * 13 | * This header makes it possible to support systems where assert is not 14 | * valid to use. Note that `<assert.h>` may remain a dependency for static 15 | * assertions. 16 | * 17 | * `FLATCC_ASSERT` is designed to handle errors which cannot be ignored 18 | * and could lead to a crash. The portable library may use assertions that 19 | * are not affected by this macro. 20 | * 21 | * `FLATCC_ASSERT` defaults to POSIX assert but can be overridden by a 22 | * preprocessor definition. 23 | * 24 | * Runtime assertions can be entirely disabled by defining 25 | * `FLATCC_NO_ASSERT`. 26 | */ 27 | 28 | #ifdef FLATCC_NO_ASSERT 29 | /* NOTE: This will not affect inclusion of <assert.h> for static assertions. */ 30 | #undef FLATCC_ASSERT 31 | #define FLATCC_ASSERT(x) ((void)0) 32 | /* Grisu3 is used for floating point conversion in JSON processing. */ 33 | #define GRISU3_NO_ASSERT 34 | #endif 35 | 36 | #ifndef FLATCC_ASSERT 37 | #include <assert.h> 38 | #define FLATCC_ASSERT assert 39 | #endif 40 | 41 | #ifdef __cplusplus 42 | } 43 | #endif 44 | 45 | #endif /* FLATCC_ASSERT_H */ 46 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/flatcc_epilogue.h: -------------------------------------------------------------------------------- 1 | /* Include guard intentionally left out. */ 2 | 3 | #ifdef __cplusplus 4 | } 5 | #endif 6 | 7 | #include "flatcc/portable/pdiagnostic_pop.h" 8 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/flatcc_flatbuffers.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Even C11 compilers depend on clib support for `static_assert` which 3 | * isn't always present, so we deal with this here for all compilers. 4 | * 5 | * Outside include guard to handle scope counter. 6 | */ 7 | #include "flatcc/portable/pstatic_assert.h" 8 | 9 | #ifndef FLATCC_FLATBUFFERS_H 10 | #define FLATCC_FLATBUFFERS_H 11 | 12 | #ifdef __cplusplus 13 | extern "C" { 14 | #endif 15 | 16 | #ifndef flatcc_flatbuffers_defined 17 | #define flatcc_flatbuffers_defined 18 | 19 | #ifdef FLATCC_PORTABLE 20 | #include "flatcc/flatcc_portable.h" 21 | #endif 22 | #include "flatcc/portable/pwarnings.h" 23 | /* Needed by C99 compilers without FLATCC_PORTABLE. */ 24 | #include "flatcc/portable/pstdalign.h" 25 | 26 | /* Handle fallthrough attribute in switch statements.
*/ 27 | #include "flatcc/portable/pattributes.h" 28 | 29 | #include "flatcc/flatcc_alloc.h" 30 | #include "flatcc/flatcc_assert.h" 31 | 32 | #define __FLATBUFFERS_PASTE2(a, b) a ## b 33 | #define __FLATBUFFERS_PASTE3(a, b, c) a ## b ## c 34 | #define __FLATBUFFERS_CONCAT(a, b) __FLATBUFFERS_PASTE2(a, b) 35 | 36 | /* 37 | * "flatcc_endian.h" requires the preceding include files, 38 | * or compatible definitions. 39 | */ 40 | #include "flatcc/portable/pendian.h" 41 | #include "flatcc/flatcc_types.h" 42 | #include "flatcc/flatcc_endian.h" 43 | #include "flatcc/flatcc_identifier.h" 44 | 45 | #ifndef FLATBUFFERS_WRAP_NAMESPACE 46 | #define FLATBUFFERS_WRAP_NAMESPACE(ns, x) ns ## _ ## x 47 | #endif 48 | 49 | #endif /* flatcc_flatbuffers_defined */ 50 | 51 | #ifdef __cplusplus 52 | } 53 | #endif 54 | 55 | #endif /* FLATCC_FLATBUFFERS_H */ 56 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/flatcc_iov.h: -------------------------------------------------------------------------------- 1 | #ifndef FLATCC_IOV_H 2 | #define FLATCC_IOV_H 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | #include <stdlib.h> 9 | 10 | /* 11 | * The emitter receives one, or a few buffers at a time via 12 | * this type. A <sys/uio.h>-compatible iovec structure is used for 13 | * allocation and the emitter interface. 14 | */ 15 | typedef struct flatcc_iovec flatcc_iovec_t; 16 | struct flatcc_iovec { 17 | void *iov_base; 18 | size_t iov_len; 19 | }; 20 | 21 | /* 22 | * The largest iovec vector the builder will issue. It will 23 | * always be a relatively small number. 24 | */ 25 | #define FLATCC_IOV_COUNT_MAX 8 26 | 27 | #ifdef __cplusplus 28 | } 29 | #endif 30 | 31 | #endif /* FLATCC_IOV_H */ 32 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/flatcc_prologue.h: -------------------------------------------------------------------------------- 1 | /* Include guard intentionally left out.
*/ 2 | 3 | #define PDIAGNOSTIC_IGNORE_UNUSED 4 | #include "flatcc/portable/pdiagnostic_push.h" 5 | 6 | #ifdef __cplusplus 7 | extern "C" { 8 | #endif 9 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/flatcc_portable.h: -------------------------------------------------------------------------------- 1 | #ifndef FLATCC_PORTABLE_H 2 | #define FLATCC_PORTABLE_H 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | #include "flatcc/portable/portable_basic.h" 9 | 10 | #ifdef __cplusplus 11 | } 12 | #endif 13 | 14 | #endif /* FLATCC_PORTABLE_H */ 15 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pdiagnostic_pop.h: -------------------------------------------------------------------------------- 1 | #if defined(PDIAGNOSTIC_PUSHED_MSVC) 2 | #if PDIAGNOSTIC_PUSHED_MSVC 3 | #pragma warning( pop ) 4 | #endif // PDIAGNOSTIC_PUSHED_MSVC 5 | #undef PDIAGNOSTIC_PUSHED_MSVC 6 | #endif // defined(PDIAGNOSTIC_PUSHED_MSVC) 7 | 8 | #if defined(PDIAGNOSTIC_PUSHED_CLANG) 9 | #if PDIAGNOSTIC_PUSHED_CLANG 10 | #pragma clang diagnostic pop 11 | #endif // PDIAGNOSTIC_PUSHED_CLANG 12 | #undef PDIAGNOSTIC_PUSHED_CLANG 13 | #endif // defined(PDIAGNOSTIC_PUSHED_CLANG) 14 | 15 | #if defined(PDIAGNOSTIC_PUSHED_GCC) 16 | #if PDIAGNOSTIC_PUSHED_GCC 17 | #pragma GCC diagnostic pop 18 | #endif // PDIAGNOSTIC_PUSHED_GCC 19 | #undef PDIAGNOSTIC_PUSHED_GCC 20 | #endif // defined(PDIAGNOSTIC_PUSHED_GCC) 21 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pdiagnostic_push.h: -------------------------------------------------------------------------------- 1 | /* 2 | * See also comment in "pdiagnostic.h" 3 | * 4 | * e.g. 5 | * #define PDIAGNOSTIC_IGNORE_USED_FUNCTION 6 | * #define PDIAGNOSTIC_IGNORE_USED_VARIABLE 7 | * #include "pdiagnostic_push.h" 8 | * ... 9 | * #include "pdiagnostic_pop.h" 10 | * 11 | * 12 | * or if push/pop isn't desired: 13 | * #define PDIAGNOSTIC_IGNORE_USED_FUNCTION 14 | * #define PDIAGNOSTIC_IGNORE_USED_VARIABLE 15 | * #include "pdiagnostic.h" 16 | * ... 17 | * 18 | * 19 | * 20 | * Some of these warnings cannot be ignored 21 | * at the #pragma level, but might in the future. 22 | * Use compiler switches like -Wno-unused-function 23 | * to work around this.
24 | */ 25 | 26 | #if defined(_MSC_VER) 27 | #pragma warning( push ) 28 | #define PDIAGNOSTIC_PUSHED_MSVC 1 29 | #else 30 | #define PDIAGNOSTIC_PUSHED_MSVC 0 31 | #endif 32 | 33 | #if defined(__clang__) 34 | #pragma clang diagnostic push 35 | #define PDIAGNOSTIC_PUSHED_CLANG 1 36 | #else 37 | #define PDIAGNOSTIC_PUSHED_CLANG 0 38 | #endif 39 | 40 | #if defined(__GNUC__) && !defined(__clang__) 41 | #if ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) 42 | #pragma GCC diagnostic push 43 | #define PDIAGNOSTIC_PUSHED_GCC 1 44 | #else 45 | #define PDIAGNOSTIC_PUSHED_GCC 0 46 | #endif // GNUC >= 4.6 47 | #else 48 | #define PDIAGNOSTIC_PUSHED_GCC 0 49 | #endif // defined(__GNUC__) && !defined(__clang__) 50 | 51 | #include "pdiagnostic.h" 52 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pinline.h: -------------------------------------------------------------------------------- 1 | #ifndef PINLINE_H 2 | #define PINLINE_H 3 | 4 | #ifndef __cplusplus 5 | 6 | #if (defined(__STDC__) && __STDC__ && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) 7 | /* C99 or newer */ 8 | #elif _MSC_VER >= 1500 /* MSVC 9 or newer */ 9 | #undef inline 10 | #define inline __inline 11 | #elif __GNUC__ >= 3 /* GCC 3 or newer */ 12 | #define inline __inline 13 | #else /* Unknown or ancient */ 14 | #define inline 15 | #endif 16 | 17 | #endif /* __cplusplus */ 18 | 19 | #endif /* PINLINE_H */ 20 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pinttypes.h: -------------------------------------------------------------------------------- 1 | #ifndef PINTTYPES_H 2 | #define PINTTYPES_H 3 | 4 | #ifndef PRId16 5 | 6 | #if (defined(__STDC__) && __STDC__ && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) 7 | /* C99 or newer */ 8 | #include <inttypes.h> 9 | #else 10 | 11 | /* 12 | * This is not a complete implementation of <inttypes.h>, just the most 13 | * useful printf modifiers. 14 | */ 15 | 16 | #include "pstdint.h" 17 | 18 | #ifndef PRINTF_INT64_MODIFIER 19 | #error "please define PRINTF_INT64_MODIFIER" 20 | #endif 21 | 22 | #ifndef PRId64 23 | #define PRId64 PRINTF_INT64_MODIFIER "d" 24 | #define PRIu64 PRINTF_INT64_MODIFIER "u" 25 | #define PRIx64 PRINTF_INT64_MODIFIER "x" 26 | #endif 27 | 28 | #ifndef PRINTF_INT32_MODIFIER 29 | #define PRINTF_INT32_MODIFIER "l" 30 | #endif 31 | 32 | #ifndef PRId32 33 | #define PRId32 PRINTF_INT32_MODIFIER "d" 34 | #define PRIu32 PRINTF_INT32_MODIFIER "u" 35 | #define PRIx32 PRINTF_INT32_MODIFIER "x" 36 | #endif 37 | 38 | #ifndef PRINTF_INT16_MODIFIER 39 | #define PRINTF_INT16_MODIFIER "h" 40 | #endif 41 | 42 | #ifndef PRId16 43 | #define PRId16 PRINTF_INT16_MODIFIER "d" 44 | #define PRIu16 PRINTF_INT16_MODIFIER "u" 45 | #define PRIx16 PRINTF_INT16_MODIFIER "x" 46 | #endif 47 | 48 | # endif /* __STDC__ */ 49 | 50 | #endif /* PRId16 */ 51 | 52 | #endif /* PINTTYPES_H */ 53 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/portable.h: -------------------------------------------------------------------------------- 1 | /* portable.h is widely used, so we redirect to a less conflicting name.
*/ 2 | #include "portable_basic.h" 3 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/portable_basic.h: -------------------------------------------------------------------------------- 1 | #ifndef PORTABLE_BASIC_H 2 | #define PORTABLE_BASIC_H 3 | 4 | /* 5 | * Basic features needed to make compilers support the most common modern C 6 | * features, and endian / unaligned read support as well. 7 | * 8 | * It is not assumed that this file is always included. 9 | * Other include files are independent or include what they need. 10 | */ 11 | 12 | #include "pversion.h" 13 | #include "pwarnings.h" 14 | 15 | /* Features that ought to be supported by C11, but some aren't. */ 16 | #include "pinttypes.h" 17 | #include "pstdalign.h" 18 | #include "pinline.h" 19 | #include "pstatic_assert.h" 20 | 21 | /* These are not supported by C11 and are general platform abstractions. */ 22 | #include "pendian.h" 23 | #include "punaligned.h" 24 | 25 | #endif /* PORTABLE_BASIC_H */ 26 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pversion.h: -------------------------------------------------------------------------------- 1 | #define PORTABLE_VERSION_TEXT "0.2.6-pre" 2 | #define PORTABLE_VERSION_MAJOR 0 3 | #define PORTABLE_VERSION_MINOR 2 4 | #define PORTABLE_VERSION_PATCH 6 5 | /* 1 or 0 */ 6 | #define PORTABLE_VERSION_RELEASED 0 7 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/ArrowIterator/flatcc/portable/pwarnings.h: -------------------------------------------------------------------------------- 1 | #ifndef PWARNINGS_H 2 | #define PWARNINGS_H 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | /* 9 | * See also the pdiagnostic.h headers for per file control of common 10 | * warnings. 11 | * 12 | * This file is intended for global disabling of warnings that shouldn't 13 | * be present in C11 or perhaps C99, or are generally just noise where 14 | * recent clang / gcc compile cleanly with high warning levels. 15 | */ 16 | 17 | #if defined(_MSC_VER) 18 | /* Needed when flagging code in or out and more. */ 19 | #pragma warning(disable: 4127) /* conditional expression is constant */ 20 | /* happens also in MS's own headers. */ 21 | #pragma warning(disable: 4668) /* preprocessor name not defined */ 22 | /* MSVC does not respect double parentheses for intent */ 23 | #pragma warning(disable: 4706) /* assignment within conditional expression */ 24 | /* `inline` is only advisory anyway. */ 25 | #pragma warning(disable: 4710) /* function not inlined */ 26 | /* Well, we don't intend to add the padding manually. */ 27 | #pragma warning(disable: 4820) /* x bytes padding added in struct */ 28 | 29 | /* 30 | * Don't warn that fopen etc. are unsafe 31 | * 32 | * Define a compiler flag like `-D_CRT_SECURE_NO_WARNINGS` in the build. 33 | * For some reason it doesn't work when defined here. 34 | * 35 | * #define _CRT_SECURE_NO_WARNINGS 36 | */ 37 | 38 | /* 39 | * Anonymous union in struct is valid in C11 and has been supported in 40 | * GCC and Clang for a while, but it is not C99. MSVC also handles it, 41 | * but warns. Truly portable code should perhaps not use this feature, 42 | * but this is not the place to complain about it.
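 * An illustrative example of the construct (a sketch, not code from this
 * codebase):
 *
 *     struct value { union { int i; float f; }; };
 *
 * This is valid C11 but still triggers C4201 under MSVC, hence the
 * pragma below.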
43 | */ 44 | #pragma warning(disable: 4201) /* nonstandard extension used: nameless struct/union */ 45 | 46 | #endif /* _MSC_VER */ 47 | 48 | #ifdef __cplusplus 49 | } 50 | #endif 51 | 52 | #endif /* PWARNINGS_H */ 53 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/Logging/logging.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PC_LOGGING_HPP 2 | #define PC_LOGGING_HPP 3 | 4 | #include <string> 5 | 6 | #include "Python/Common.hpp" 7 | 8 | namespace sf { 9 | 10 | class Logger { 11 | public: 12 | explicit Logger(const char *name); 13 | 14 | void log(int level, const char *path_name, const char *func_name, 15 | int line_num, const char *msg); 16 | 17 | void debug(const char *path_name, const char *func_name, int line_num, 18 | const char *format, ...); 19 | 20 | void info(const char *path_name, const char *func_name, int line_num, 21 | const char *format, ...); 22 | 23 | void warn(const char *path_name, const char *func_name, int line_num, 24 | const char *format, ...); 25 | 26 | void error(const char *path_name, const char *func_name, int line_num, 27 | const char *format, ...); 28 | 29 | static std::string formatString(const char *fmt, ...); 30 | 31 | private: 32 | py::UniqueRef m_pyLogger; 33 | const char *const m_name; 34 | static constexpr int CRITICAL = 50; 35 | static constexpr int FATAL = CRITICAL; 36 | static constexpr int ERROR = 40; 37 | static constexpr int WARNING = 30; 38 | static constexpr int WARN = WARNING; 39 | static constexpr int INFO = 20; 40 | static constexpr int DEBUG = 10; 41 | static constexpr int NOTSET = 0; 42 | static constexpr int LINE_NUM = 0; 43 | 44 | void setupPyLogger(); 45 | }; 46 | 47 | } // namespace sf 48 | 49 | #endif // PC_LOGGING_HPP 50 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/scripts/.clang-format: -------------------------------------------------------------------------------- 1 | BasedOnStyle: Google 2 | AccessModifierOffset: -2 3 | BreakBeforeBraces: Allman 4 | ConstructorInitializerIndentWidth: 0 5 | IndentCaseLabels: true 6 | IndentWidth: 2 7 | NamespaceIndentation: None 8 | PointerBindsToType: true 9 | -------------------------------------------------------------------------------- /src/snowflake/connector/nanoarrow_cpp/scripts/format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | 5 | find "$THIS_DIR/.."
-iname '*.h' -o -iname '*.c' -o -iname '*.cpp' -o -iname '*.hpp' \ 6 | | xargs clang-format -style=file -i 7 | 8 | exit 0 9 | -------------------------------------------------------------------------------- /src/snowflake/connector/proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import os 5 | 6 | 7 | def set_proxies( 8 | proxy_host: str | None, 9 | proxy_port: str | None, 10 | proxy_user: str | None = None, 11 | proxy_password: str | None = None, 12 | ) -> dict[str, str] | None: 13 | """Sets proxy dict for requests.""" 14 | PREFIX_HTTP = "http://" 15 | PREFIX_HTTPS = "https://" 16 | proxies = None 17 | if proxy_host and proxy_port: 18 | if proxy_host.startswith(PREFIX_HTTP): 19 | proxy_host = proxy_host[len(PREFIX_HTTP) :] 20 | elif proxy_host.startswith(PREFIX_HTTPS): 21 | proxy_host = proxy_host[len(PREFIX_HTTPS) :] 22 | if proxy_user or proxy_password: 23 | proxy_auth = "{proxy_user}:{proxy_password}@".format( 24 | proxy_user=proxy_user if proxy_user is not None else "", 25 | proxy_password=proxy_password if proxy_password is not None else "", 26 | ) 27 | else: 28 | proxy_auth = "" 29 | proxies = { 30 | "http": "http://{proxy_auth}{proxy_host}:{proxy_port}".format( 31 | proxy_host=proxy_host, 32 | proxy_port=str(proxy_port), 33 | proxy_auth=proxy_auth, 34 | ), 35 | "https": "http://{proxy_auth}{proxy_host}:{proxy_port}".format( 36 | proxy_host=proxy_host, 37 | proxy_port=str(proxy_port), 38 | proxy_auth=proxy_auth, 39 | ), 40 | } 41 | os.environ["HTTP_PROXY"] = proxies["http"] 42 | os.environ["HTTPS_PROXY"] = proxies["https"] 43 | return proxies 44 | -------------------------------------------------------------------------------- /src/snowflake/connector/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/py.typed -------------------------------------------------------------------------------- /src/snowflake/connector/sfbinaryformat.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from base64 import b16decode, b16encode, standard_b64encode 5 | 6 | from .errors import InternalError 7 | 8 | # Converts a Snowflake binary value into a "bytes" object.
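# A quick illustrative sketch (not part of the original module): binary
# values travel B16/hex encoded, so, for example,
#   binary_to_python(b"414243") == b"ABC"
#   binary_to_snowflake(b"ABC") == b"414243"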
9 | binary_to_python = b16decode 10 | 11 | 12 | def binary_to_snowflake(binary_value) -> bytes | bytearray: 13 | """Encodes a "bytes" object for passing to Snowflake.""" 14 | result = b16encode(binary_value) 15 | 16 | if isinstance(binary_value, bytearray): 17 | return bytearray(result) 18 | return result 19 | 20 | 21 | class SnowflakeBinaryFormat: 22 | """Formats binary values ("bytes" objects) in hex or base64.""" 23 | 24 | def __init__(self, name) -> None: 25 | name = name.upper() 26 | if name == "HEX": 27 | self._encode = b16encode 28 | elif name == "BASE64": 29 | self._encode = standard_b64encode 30 | else: 31 | raise InternalError(f"Unrecognized binary format {name}") 32 | 33 | def format(self, binary_value): 34 | """Formats a "bytes" object, returning a string.""" 35 | return self._encode(binary_value).decode("ascii") 36 | -------------------------------------------------------------------------------- /src/snowflake/connector/sqlstate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED = "08001" 3 | SQLSTATE_CONNECTION_ALREADY_EXISTS = "08002" 4 | SQLSTATE_CONNECTION_NOT_EXISTS = "08003" 5 | SQLSTATE_CONNECTION_REJECTED = "08004" 6 | SQLSTATE_CONNECTION_FAILED_BUT_REESTABLISHED = "08506" 7 | SQLSTATE_HOST_NOT_FOUND = "08508" 8 | SQLSTATE_FEATURE_NOT_SUPPORTED = "0A000" 9 | SQLSTATE_IO_ERROR = "58030" 10 | -------------------------------------------------------------------------------- /src/snowflake/connector/ssd_internal_keys.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from binascii import unhexlify 5 | 6 | # key version 7 | ocsp_internal_dep1_key_ver = 0.1 8 | ocsp_internal_dep2_key_ver = 0.1 9 | 10 | # OCSP Hard coded public keys 11 | ocsp_internal_ssd_pub_dep1 = None 12 | ocsp_internal_ssd_pub_dep2 = None 13 | 14 | # Default cert id for key update directives 15 | SF_KEY_UPDATE_SSD_DEFAULT_CERT_ID = 0 16 | 17 | 18 | def ret_int_pub_key_ver(issuer): 19 | if issuer == "dep1": 20 | return ocsp_internal_dep1_key_ver 21 | else: 22 | return ocsp_internal_dep2_key_ver 23 | 24 | 25 | def ret_wildcard_hkey(): 26 | issuer_name_hash = unhexlify("040130") 27 | issuer_key_hash = unhexlify("040130") 28 | serial_number = unhexlify("020100") 29 | hkey = (issuer_name_hash, issuer_key_hash, serial_number) 30 | return hkey 31 | -------------------------------------------------------------------------------- /src/snowflake/connector/test_util.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import logging 5 | import os 6 | from typing import cast 7 | 8 | from .compat import IS_LINUX 9 | 10 | RUNNING_ON_JENKINS = os.getenv("JENKINS_HOME") is not None 11 | REGRESSION_TEST_LOG_DIR = os.getenv("CLIENT_LOG_DIR_PATH_DOCKER") 12 | ENABLE_TELEMETRY_LOG = RUNNING_ON_JENKINS and REGRESSION_TEST_LOG_DIR and IS_LINUX 13 | rt_plain_logger = None 14 | 15 | 16 | if ENABLE_TELEMETRY_LOG: 17 | rt_plain_logger = logging.getLogger("regression.test.plain.logger") 18 | rt_plain_logger.setLevel(logging.DEBUG) 19 | ch = logging.FileHandler( 20 | os.path.join( 21 | cast(str, REGRESSION_TEST_LOG_DIR), "snowflake_ssm_rt_telemetry.log" 22 | ) 23 | ) 24 | ch.setLevel(logging.DEBUG) 25 | ch.setFormatter( 26 | logging.Formatter( 27 | "%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() -
%(levelname)s - %(message)s" 28 | ) 29 | ) 30 | rt_plain_logger.addHandler(ch) 31 | -------------------------------------------------------------------------------- /src/snowflake/connector/tool/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/tool/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/tool/dump_certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import os 5 | import sys 6 | from os import path 7 | from typing import TYPE_CHECKING 8 | 9 | from snowflake.connector.ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto 10 | 11 | if TYPE_CHECKING: 12 | from asn1crypto.x509 import Certificate 13 | 14 | 15 | def main() -> None: 16 | """Internal Tool: Extract certificate files in PEM.""" 17 | 18 | def help() -> None: 19 | print( 20 | "Extract certificate file. The target file can be a single file " 21 | "or a directory including multiple certificates. The certificate " 22 | "file format should be PEM." 23 | ) 24 | print( 25 | """ 26 | Usage: {} 27 | """.format( 28 | path.basename(sys.argv[0]) 29 | ) 30 | ) 31 | sys.exit(2) 32 | 33 | if len(sys.argv) < 2: 34 | help() 35 | 36 | input_filename = sys.argv[1] 37 | if path.isdir(input_filename): 38 | files = [path.join(input_filename, f) for f in os.listdir(input_filename)] 39 | else: 40 | files = [input_filename] 41 | 42 | for f in files: 43 | open(f) 44 | extract_certificate_file(f) 45 | 46 | 47 | def extract_certificate_file(input_filename) -> None: 48 | ocsp = SnowflakeOCSPAsn1Crypto() 49 | cert_map: dict[bytes, Certificate] = {} 50 | ocsp.read_cert_bundle(input_filename, cert_map) 51 | 52 | for cert in cert_map.values(): 53 | print(f"serial #: {cert.serial_number}, name: {cert.subject.native}") 54 | 55 | 56 | if __name__ == "__main__": 57 | main() 58 | -------------------------------------------------------------------------------- /src/snowflake/connector/url_util.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | import urllib.parse 5 | from logging import getLogger 6 | 7 | from .constants import _TOP_LEVEL_DOMAIN_REGEX 8 | 9 | logger = getLogger(__name__) 10 | 11 | 12 | URL_VALIDATOR = re.compile( 13 | "^http(s?)\\:\\/\\/[0-9a-zA-Z]([-.\\w]*[0-9a-zA-Z@:])*(:(0-9)*)*(\\/?)([a-zA-Z0-9\\-\\.\\?\\,\\&\\(\\)\\/\\\\\\+&%\\$#_=@:]*)?$" 14 | ) 15 | 16 | 17 | def is_valid_url(url: str) -> bool: 18 | """Confirms if the provided URL is a valid HTTP/ HTTPs URL 19 | 20 | Args: 21 | url: the URL that needs to be validated 22 | 23 | Returns: 24 | true/ false depending on whether the URL is valid or not 25 | """ 26 | return bool(URL_VALIDATOR.match(url)) 27 | 28 | 29 | def url_encode_str(target: str | None) -> str: 30 | """Converts a target string into escaped URL safe string 31 | 32 | Args: 33 | target: string to be URL encoded 34 | 35 | Returns: 36 | URL encoded string 37 | """ 38 | if target is None: 39 | logger.debug("The string to be URL encoded is None") 40 | return "" 41 | return urllib.parse.quote_plus(target, safe="") 42 | 43 | 44 | def extract_top_level_domain_from_hostname(hostname: str | None = None) -> str: 45 | if not hostname: 46 | return "com" 47 | # RFC1034 for TLD spec, and 
https://data.iana.org/TLD/tlds-alpha-by-domain.txt for full TLD list 48 | match = re.search(_TOP_LEVEL_DOMAIN_REGEX, hostname) 49 | return (match.group(0)[1:] if match else "com").lower() 50 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 3 | # 4 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/requests/__version__.py: -------------------------------------------------------------------------------- 1 | # .-. .-. .-. . . .-. .-. .-. .-. 2 | # |( |- |.| | | |- `-. | `-. 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' 4 | 5 | __title__ = "requests" 6 | __description__ = "Python HTTP for Humans." 7 | __url__ = "https://requests.readthedocs.io" 8 | __version__ = "2.31.0" 9 | __build__ = 0x023100 10 | __author__ = "Kenneth Reitz" 11 | __author_email__ = "me@kennethreitz.org" 12 | __license__ = "Apache 2.0" 13 | __copyright__ = "Copyright Kenneth Reitz" 14 | __cake__ = "\u2728 \U0001f370 \u2728" 15 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/requests/_internal_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests._internal_utils 3 | ~~~~~~~~~~~~~~ 4 | 5 | Provides utility functions that are consumed internally by Requests 6 | which depend on extremely few external helpers (such as compat) 7 | """ 8 | import re 9 | 10 | from .compat import builtin_str 11 | 12 | _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") 13 | _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") 14 | _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") 15 | _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") 16 | 17 | _HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) 18 | _HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) 19 | HEADER_VALIDATORS = { 20 | bytes: _HEADER_VALIDATORS_BYTE, 21 | str: _HEADER_VALIDATORS_STR, 22 | } 23 | 24 | 25 | def to_native_string(string, encoding="ascii"): 26 | """Given a string object, regardless of type, returns a representation of 27 | that string in the native string type, encoding and decoding where 28 | necessary. This assumes ASCII unless told otherwise. 29 | """ 30 | if isinstance(string, builtin_str): 31 | out = string 32 | else: 33 | out = string.decode(encoding) 34 | 35 | return out 36 | 37 | 38 | def unicode_is_ascii(u_string): 39 | """Determine if unicode string only contains ASCII characters. 40 | 41 | :param str u_string: unicode string to check. Must be unicode 42 | and not Python 2 `str`. 43 | :rtype: bool 44 | """ 45 | assert isinstance(u_string, str) 46 | try: 47 | u_string.encode("ascii") 48 | return True 49 | except UnicodeEncodeError: 50 | return False 51 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | requests.certs 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module returns the preferred default CA certificate bundle. There is 8 | only one — the one from the certifi package. 
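As a quick sanity check, this module can be run directly to print the
resolved bundle path (an illustrative sketch; the exact path depends on
where certifi is installed):

    $ python -m snowflake.connector.vendored.requests.certs
    /.../site-packages/certifi/cacert.pem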
9 | 10 | If you are packaging Requests, e.g., for a Linux distribution or a managed 11 | environment, you can change the definition of where() to return a separately 12 | packaged CA bundle. 13 | """ 14 | from certifi import where 15 | 16 | if __name__ == "__main__": 17 | print(where()) 18 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/requests/compat.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.compat 3 | ~~~~~~~~~~~~~~~ 4 | 5 | This module previously handled import compatibility issues 6 | between Python 2 and Python 3. It remains for backwards 7 | compatibility until the next major version. 8 | """ 9 | 10 | try: 11 | import chardet 12 | except ImportError: 13 | import charset_normalizer as chardet 14 | 15 | import sys 16 | 17 | # ------- 18 | # Pythons 19 | # ------- 20 | 21 | # Syntax sugar. 22 | _ver = sys.version_info 23 | 24 | #: Python 2.x? 25 | is_py2 = _ver[0] == 2 26 | 27 | #: Python 3.x? 28 | is_py3 = _ver[0] == 3 29 | 30 | # json/simplejson module import resolution 31 | has_simplejson = False 32 | try: 33 | import simplejson as json 34 | 35 | has_simplejson = True 36 | except ImportError: 37 | import json 38 | 39 | if has_simplejson: 40 | from simplejson import JSONDecodeError 41 | else: 42 | from json import JSONDecodeError 43 | 44 | # Keep OrderedDict for backwards compatibility. 45 | from collections import OrderedDict 46 | from collections.abc import Callable, Mapping, MutableMapping 47 | from http import cookiejar as cookielib 48 | from http.cookies import Morsel 49 | from io import StringIO 50 | 51 | # -------------- 52 | # Legacy Imports 53 | # -------------- 54 | from urllib.parse import ( 55 | quote, 56 | quote_plus, 57 | unquote, 58 | unquote_plus, 59 | urldefrag, 60 | urlencode, 61 | urljoin, 62 | urlparse, 63 | urlsplit, 64 | urlunparse, 65 | ) 66 | from urllib.request import ( 67 | getproxies, 68 | getproxies_environment, 69 | parse_http_list, 70 | proxy_bypass, 71 | proxy_bypass_environment, 72 | ) 73 | 74 | builtin_str = str 75 | str = str 76 | bytes = bytes 77 | basestring = (str, bytes) 78 | numeric_types = (int, float) 79 | integer_types = (int,) 80 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/requests/hooks.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.hooks 3 | ~~~~~~~~~~~~~~ 4 | 5 | This module provides the capabilities for the Requests hooks system. 6 | 7 | Available hooks: 8 | 9 | ``response``: 10 | The response generated from a Request. 
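A minimal usage sketch (illustrative only; ``log_url`` is a made-up
callback, not part of this module):

    def log_url(response, **kwargs):
        print(response.url)

    hooks = default_hooks()
    hooks["response"].append(log_url)
    # dispatch_hook("response", hooks, response) then invokes log_url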
11 | """ 12 | HOOKS = ["response"] 13 | 14 | 15 | def default_hooks(): 16 | return {event: [] for event in HOOKS} 17 | 18 | 19 | # TODO: response is the only one 20 | 21 | 22 | def dispatch_hook(key, hooks, hook_data, **kwargs): 23 | """Dispatches a hook dictionary on a given piece of data.""" 24 | hooks = hooks or {} 25 | hooks = hooks.get(key) 26 | if hooks: 27 | if hasattr(hooks, "__call__"): 28 | hooks = [hooks] 29 | for hook in hooks: 30 | _hook_data = hook(hook_data, **kwargs) 31 | if _hook_data is not None: 32 | hook_data = _hook_data 33 | return hook_data 34 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/_version.py: -------------------------------------------------------------------------------- 1 | # This file is protected via CODEOWNERS 2 | __version__ = "1.26.18" 3 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/vendored/urllib3/contrib/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/contrib/_appengine_environ.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides means to detect the App Engine environment. 3 | """ 4 | 5 | import os 6 | 7 | 8 | def is_appengine(): 9 | return is_local_appengine() or is_prod_appengine() 10 | 11 | 12 | def is_appengine_sandbox(): 13 | """Reports if the app is running in the first generation sandbox. 14 | 15 | The second generation runtimes are technically still in a sandbox, but it 16 | is much less restrictive, so generally you shouldn't need to check for it. 
17 | see https://cloud.google.com/appengine/docs/standard/runtimes 18 | """ 19 | return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" 20 | 21 | 22 | def is_local_appengine(): 23 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 24 | "SERVER_SOFTWARE", "" 25 | ).startswith("Development/") 26 | 27 | 28 | def is_prod_appengine(): 29 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 30 | "SERVER_SOFTWARE", "" 31 | ).startswith("Google App Engine/") 32 | 33 | 34 | def is_prod_appengine_mvms(): 35 | """Deprecated.""" 36 | return False 37 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/contrib/_securetransport/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/packages/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/vendored/urllib3/packages/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/packages/backports/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/src/snowflake/connector/vendored/urllib3/packages/backports/__init__.py -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/packages/backports/makefile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | backports.makefile 4 | ~~~~~~~~~~~~~~~~~~ 5 | 6 | Backports the Python 3 ``socket.makefile`` method for use with anything that 7 | wants to create a "fake" socket object. 8 | """ 9 | import io 10 | from socket import SocketIO 11 | 12 | 13 | def backport_makefile( 14 | self, mode="r", buffering=None, encoding=None, errors=None, newline=None 15 | ): 16 | """ 17 | Backport of ``socket.makefile`` from Python 3.5. 
18 | """ 19 | if not set(mode) <= {"r", "w", "b"}: 20 | raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) 21 | writing = "w" in mode 22 | reading = "r" in mode or not writing 23 | assert reading or writing 24 | binary = "b" in mode 25 | rawmode = "" 26 | if reading: 27 | rawmode += "r" 28 | if writing: 29 | rawmode += "w" 30 | raw = SocketIO(self, rawmode) 31 | self._makefile_refs += 1 32 | if buffering is None: 33 | buffering = -1 34 | if buffering < 0: 35 | buffering = io.DEFAULT_BUFFER_SIZE 36 | if buffering == 0: 37 | if not binary: 38 | raise ValueError("unbuffered streams must be binary") 39 | return raw 40 | if reading and writing: 41 | buffer = io.BufferedRWPair(raw, raw, buffering) 42 | elif reading: 43 | buffer = io.BufferedReader(raw, buffering) 44 | else: 45 | assert writing 46 | buffer = io.BufferedWriter(raw, buffering) 47 | if binary: 48 | return buffer 49 | text = io.TextIOWrapper(buffer, encoding, errors, newline) 50 | text.mode = mode 51 | return text 52 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/util/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | # For backwards compatibility, provide imports that used to be here. 4 | from .connection import is_connection_dropped 5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers 6 | from .response import is_fp_closed 7 | from .retry import Retry 8 | from .ssl_ import ( 9 | ALPN_PROTOCOLS, 10 | HAS_SNI, 11 | IS_PYOPENSSL, 12 | IS_SECURETRANSPORT, 13 | PROTOCOL_TLS, 14 | SSLContext, 15 | assert_fingerprint, 16 | resolve_cert_reqs, 17 | resolve_ssl_version, 18 | ssl_wrap_socket, 19 | ) 20 | from .timeout import Timeout, current_time 21 | from .url import Url, get_host, parse_url, split_first 22 | from .wait import wait_for_read, wait_for_write 23 | 24 | __all__ = ( 25 | "HAS_SNI", 26 | "IS_PYOPENSSL", 27 | "IS_SECURETRANSPORT", 28 | "SSLContext", 29 | "PROTOCOL_TLS", 30 | "ALPN_PROTOCOLS", 31 | "Retry", 32 | "Timeout", 33 | "Url", 34 | "assert_fingerprint", 35 | "current_time", 36 | "is_connection_dropped", 37 | "is_fp_closed", 38 | "get_host", 39 | "parse_url", 40 | "make_headers", 41 | "resolve_cert_reqs", 42 | "resolve_ssl_version", 43 | "split_first", 44 | "ssl_wrap_socket", 45 | "wait_for_read", 46 | "wait_for_write", 47 | "SKIP_HEADER", 48 | "SKIPPABLE_HEADERS", 49 | ) 50 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/util/proxy.py: -------------------------------------------------------------------------------- 1 | from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version 2 | 3 | 4 | def connection_requires_http_tunnel( 5 | proxy_url=None, proxy_config=None, destination_scheme=None 6 | ): 7 | """ 8 | Returns True if the connection requires an HTTP CONNECT through the proxy. 9 | 10 | :param URL proxy_url: 11 | URL of the proxy. 12 | :param ProxyConfig proxy_config: 13 | Proxy configuration from poolmanager.py 14 | :param str destination_scheme: 15 | The scheme of the destination. (i.e https, http, etc) 16 | """ 17 | # If we're not using a proxy, no way to use a tunnel. 18 | if proxy_url is None: 19 | return False 20 | 21 | # HTTP destinations never require tunneling, we always forward. 22 | if destination_scheme == "http": 23 | return False 24 | 25 | # Support for forwarding with HTTPS proxies and HTTPS destinations. 
26 | if ( 27 | proxy_url.scheme == "https" 28 | and proxy_config 29 | and proxy_config.use_forwarding_for_https 30 | ): 31 | return False 32 | 33 | # Otherwise always use a tunnel. 34 | return True 35 | 36 | 37 | def create_proxy_ssl_context( 38 | ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None 39 | ): 40 | """ 41 | Generates a default proxy ssl context if one hasn't been provided by the 42 | user. 43 | """ 44 | ssl_context = create_urllib3_context( 45 | ssl_version=resolve_ssl_version(ssl_version), 46 | cert_reqs=resolve_cert_reqs(cert_reqs), 47 | ) 48 | 49 | if ( 50 | not ca_certs 51 | and not ca_cert_dir 52 | and not ca_cert_data 53 | and hasattr(ssl_context, "load_default_certs") 54 | ): 55 | ssl_context.load_default_certs() 56 | 57 | return ssl_context 58 | -------------------------------------------------------------------------------- /src/snowflake/connector/vendored/urllib3/util/queue.py: -------------------------------------------------------------------------------- 1 | import collections 2 | 3 | from ..packages import six 4 | from ..packages.six.moves import queue 5 | 6 | if six.PY2: 7 | # Queue is imported for side effects on MS Windows. See issue #229. 8 | import Queue as _unused_module_Queue  # noqa: F401 9 | 10 | 11 | class LifoQueue(queue.Queue): 12 | def _init(self, _): 13 | self.queue = collections.deque() 14 | 15 | def _qsize(self, len=len): 16 | return len(self.queue) 17 | 18 | def _put(self, item): 19 | self.queue.append(item) 20 | 21 | def _get(self): 22 | return self.queue.pop() 23 | -------------------------------------------------------------------------------- /src/snowflake/connector/version.py: -------------------------------------------------------------------------------- 1 | # Update this for the versions 2 | # Don't change the fourth version number from None 3 | VERSION = (3, 15, 0, None) 4 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | # This file houses functions and constants shared by both integration and unit tests 4 | import os 5 | 6 | CLOUD_PROVIDERS = {"aws", "azure", "gcp"} 7 | EXTERNAL_SKIP_TAGS = {"internal"} 8 | INTERNAL_SKIP_TAGS = {"external"} 9 | RUNNING_ON_GH = os.getenv("GITHUB_ACTIONS") == "true" 10 | 11 | 12 | def running_on_public_ci() -> bool: 13 | """Whether or not tests are currently running on one of our public CIs.""" 14 | return RUNNING_ON_GH 15 | -------------------------------------------------------------------------------- /test/auth/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/auth/__init__.py -------------------------------------------------------------------------------- /test/auth/test_key_pair.py: -------------------------------------------------------------------------------- 1 | from test.auth.authorization_parameters import ( 2 | AuthConnectionParameters, 3 | get_rsa_private_key_for_key_pair, 4 | ) 5 | from test.auth.authorization_test_helper import AuthorizationTestHelper 6 | 7 | import pytest 8 | 9 | 10 | @pytest.mark.auth 11 | def test_key_pair_successful(): 12 | connection_parameters = ( 13 | AuthConnectionParameters().get_key_pair_connection_parameters() 14 | ) 15 | connection_parameters["private_key"] = get_rsa_private_key_for_key_pair(
"SNOWFLAKE_AUTH_TEST_PRIVATE_KEY_PATH" 17 | ) 18 | 19 | test_helper = AuthorizationTestHelper(connection_parameters) 20 | assert ( 21 | test_helper.connect_and_execute_simple_query() 22 | ), "Failed to connect with Snowflake" 23 | assert test_helper.error_msg == "", "Error message should be empty" 24 | 25 | 26 | @pytest.mark.auth 27 | def test_key_pair_invalid_key(): 28 | connection_parameters = ( 29 | AuthConnectionParameters().get_key_pair_connection_parameters() 30 | ) 31 | connection_parameters["private_key"] = get_rsa_private_key_for_key_pair( 32 | "SNOWFLAKE_AUTH_TEST_INVALID_PRIVATE_KEY_PATH" 33 | ) 34 | 35 | test_helper = AuthorizationTestHelper(connection_parameters) 36 | assert ( 37 | not test_helper.connect_and_execute_simple_query() 38 | ), "Connection to Snowflake should not be established" 39 | assert "JWT token is invalid" in test_helper.get_error_msg() 40 | -------------------------------------------------------------------------------- /test/data/TestOrcFile.test1.orc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/TestOrcFile.test1.orc -------------------------------------------------------------------------------- /test/data/brotli_sample.txt.br: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/brotli_sample.txt.br -------------------------------------------------------------------------------- /test/data/bzip2_sample.txt.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/bzip2_sample.txt.bz2 -------------------------------------------------------------------------------- /test/data/cert_tests/production/addtrust.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU 3 | MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs 4 | IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 5 | MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux 6 | FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h 7 | bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v 8 | dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt 9 | H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 10 | uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX 11 | mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX 12 | a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN 13 | E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 14 | WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD 15 | VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 16 | Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU 17 | cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx 18 | IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN 19 | AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH 20 | YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 21 | 6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC 22 | 
Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX 23 | c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a 24 | mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= 25 | -----END CERTIFICATE----- 26 | -------------------------------------------------------------------------------- /test/data/gzip_sample.txt.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/gzip_sample.txt.gz -------------------------------------------------------------------------------- /test/data/multiple_statements.sql: -------------------------------------------------------------------------------- 1 | select 1; 2 | select 2; 3 | select 3; 4 | -------------------------------------------------------------------------------- /test/data/multiple_statements_negative.sql: -------------------------------------------------------------------------------- 1 | select 987; 2 | select 1345 3 | select 9876; 4 | -------------------------------------------------------------------------------- /test/data/nation.impala.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/nation.impala.parquet -------------------------------------------------------------------------------- /test/data/put_get_1.txt: -------------------------------------------------------------------------------- 1 | 1,2014-01-02,2014-01-02 11:30:21,2014-01-02 11:30:22,2014-01-02 11:30:23,2014-01-02T11:30:24-07:00,8.765,9.876 2 | 2,2014-02-02,2014-02-02 11:30:21,2014-02-02 11:30:22,2014-02-02 11:30:23,2014-02-02T11:30:24+02:00,8.764,9.875 3 | 3,2014-03-02,2014-03-02 11:30:21,2014-03-02 11:30:22,2014-03-02 11:30:23,2014-03-02T11:30:24Z,8.763,9.874 4 | -------------------------------------------------------------------------------- /test/data/rsa_keys/private.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAy9QkFIGxs8oXnuUKeIzTNJ3l1aFIfoUuIiRtLJ1XwmyPYHnL 3 | jC0yye3smMmctx6BcXTV9E0ebf8a0sENhSDmThjFM62baNka23Pzo6cSSSGbT2m1 4 | NQbARKa4dNP7zkWIPHa2tuK1/jRCy6Z/ARTdkPgYa4Xr0br/vL3QoZ/sy2ieeT2U 5 | 4Xa03jAghU9VgFYkIp3hpI6aTaDmKG8Z5mVjovBpW8Rg0vkkwZ3GhjhAJhr6qwMo 6 | TSgkQU/Xst0X8duO/HD7bH9NYpsySMiU4+lRsrCC0rhiCToT36kidynajEJI6uQo 7 | TQzsPtFM+Nz0Vd1+dZfJ1H+ZyIROyVXlCKhRCQIDAQABAoIBABgee0J98lQvBsqD 8 | mdCYAWoJgSfdVOG7yrC5lL2hxL+57uFgqChnNMpWQBf4S3YTwwd05thow8AKXtcv 9 | hvUI5pe5MKSj1275ucbcGqlz3KMQufsUrB+hM+ErxUn1x6cp7SwSdB6CBz0UXA8T 10 | HZY4LNu5r9IRX81KjnZ8E5L8hJ2xBl/39rHEJjtPYxYtYxUH/6xuQmR+fpfYZHbW 11 | +VOMWiuXLLM04UJvNd8MCvUbLOR00NSxKE75H8mytLNW2onRhV40ZyY9KI00T/Sh 12 | TnTV8PLIsmL2QRjOFPgOZmTfHbJy/1U50Pv50PD93jILRPuRw6plwXAR+zHOUFf8 13 | KTtutN0CgYEA8knMesYyAO3ustNDo4saDhPxccAhl5kiMS0ZuFDchweyhZshStSh 14 | 6OMxppmVW0fgbzuGSk+SnZ1VELq4W8U4Ma3kU+vKSCqdB78gmrTEUEw06GPXAMIK 15 | OXCf0k8s9x1g5zy8Lp70RqHGVYRQ7I9FaK3wOYCGUNg9jW4w4R9OvBcCgYEA110m 16 | m3xCfcCZr8w9odW04g0GdrvTeUTrGtAbktG5Zat4xYjtrOBRethiWnKoCzS6H1+1 17 | ex5REbBGwVsgBL5H0aWCA1ngRYuMTltEYV8pkW+r7PjHbnKOs4JeXhPu8GNFR6kI 18 | 1stZv01kyIQ+r0/LxcGImt1ApVfhieu3R5jv798CgYEA2qR4T07truMIJf36KuqL 19 | T6r1lmYAqnJ5ZZFChY8LAxuSIXl1mLa7iZMEqkgsfRwmz5oSvJqEP5HMQvWNCtu1 20 | szcuNgExIOC7plL21EVOZvvuPsxiKH/yhBU7VKk4daPgX6vyUcwFunugpGlsMZ20 21 | 0BsvDmUKUIhjO2710yHShm8CgYEAyFEOfgVAW3NwAAJ7OTJsAu6Wmo3uD5g5DkGP 22 | 
xCpqxdSOiz3nxKsJXrgyE2sePv4WOORvC0uRoGBUD7lQuNi8OJY3riJE5qr8IE3n 23 | 9qHrKimywsJLzcGkVffnCp6D2vibEiJ55EBVBAlaut+25p3ULebrxxAQ9zocwIqN 24 | eX0ZgAUCgYARMUbgmE30hzOFSm/JuNfQqK4uj6Je1qgNKSs6AUkDwZ1i3eduVD6v 25 | chdK2/CuHQqZUBJa19d7oxdF+czplbP3IRCvEzOb92Tk8N99KzEsdFbWJ2UF/CEn 26 | yiZMcE6FWiZS0iVVE8MAQumtQqphFoBla0we92/MG2rLi3aVB7gzgw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /test/data/rsa_keys/privatekey2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpgIBAAKCAQEAzA/Ws3OvcV0uUN+2Q+wH4JLGwD/gabXz7OYyw49it1bgvUMl 3 | mKssxmaWYEQ3h/uGAtKn5pahKK3a/P3e8LrDfwl8On5WWgSlvF3WwbtXrJDT9UYT 4 | jjakjBxngooE9gh9BJdkb/kLUL0MulERsUf5oGBPnWK4tyr3TXCTcBis9dnU09Q7 5 | 1QGAIgYe/eaXda2xqOUZLkAfewTQ2AqGL1SKlOl5Xpk9zsBgMseYuoEAe92w9YNn 6 | 0g41wRukPvCt/z6/J9b26x/+DF2Du4PpeZeX1Qij5VgbHmiUut9QIiymV+bSC0+y 7 | Kfe5Lwt8QR4oyuEVmbHe6bHUiVtWiahiU8sRJwIDAQABAoIBAQCChp8OqjDOkovN 8 | r0smpxNi0n/O+QzSkVA1eAmAjXbXTvryFME3pkY9oeEOMpRSptBRfe1n0XHEU3B/ 9 | 4uN3l/70g1yzDZyud8qLcnqr8OljuD/b47cegFVASerr1NzXgxZ0mWHlPae/PS2m 10 | /7QRcbh6nSBPy5Xbk+Ab6KZmTWJU3pv6I2mNO+1vJYQN0XRssfHRXgwE8Z+o5pNV 11 | hp6trE/9kkmlofN313ik/OhRh5/vothO6YBl8aGCyfXBP0oe5HykPn94Q6g1Pjye 12 | tu0D0onF2vUyhYJFJKAqArxS3gjqw2TFRRVOLn0c1UqLjVOhMxNyreCA0Vjkycka 13 | A8QidwoBAoGBAPQ14wG5tRrT48+TmCQohrqsuP4x7efPJtbySMP53ZDTSkqtRDiY 14 | s1NromqBvoZKYWJjQZUpTnYIill17mnbyRfUtvSdDoAFjMfD6BK75QwHcj+Do7k0 15 | 8ET9SbIw+B6YqrSohxeL+6V7PZncjHNUUOmF1A8jxBLuMfoJGQezwGFjAoGBANXp 16 | xNgZgZUdjd6KOVTTwfEaK4j9HgMjCP3UZsPX/Kg0tc6YcWX7W7QUNFeQA9YYEGL7 17 | Ycc7AiVo9iyfQYNFLZIqgeRERzrYdiVRL+U78P9XuGzl1zxTinP/0GZPoJ+I1Tql 18 | XJHmqF5Img2TUvvrxi1pD7jRvYamSL02OMtYKB5tAoGBAIlJvfubVgfl9N0eIVq6 19 | ebt6HGmy0gcOesw92tBpqLihP9Opgn+6wJ12FhGALQa7y/GO0TC0zwomFPjkBor2 20 | Zx+BkvUDUkKAXDt1lALsJIDVstGoBKO68hQmvIZzZxl0RAtZ7tA6/tZx6RiVV1QC 21 | o5YUiTmVpsXpqKji8nJVCCL3AoGBAL1BwX2sO2CxsDYnNwgc/icRj1j5EReXZLom 22 | tPEq5Afx8rShXRrsY7sUcSbTbQF7cuwQGvdI9uIgVkUudajtiJ8caqWJ3zdqEkV7 23 | 6hM3kEZp1urKAz8Fbqaouzn501OynWzwptETMP2R4HKIWNA4TrMYk+dh8fvynMKC 24 | +Ya3LW+VAoGBAIgtjbpm0MIetzb7fmtqEK+PMmaPsdt4rKvq/JgTwMobfYuE1s3C 25 | NTJVa7VEPlmiByoVzW4IZcDXEoWoSTqgRT2JpBTddoNWGgJH0R0pkb3GvbD7IRAe 26 | sZ4B6lduXtnXGXZe1D3yWz9mCAJygUsMKUhb1WVrjsh7LQFLUAq0AK4I 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /test/data/rsa_keys/public.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy9QkFIGxs8oXnuUKeIzT 3 | NJ3l1aFIfoUuIiRtLJ1XwmyPYHnLjC0yye3smMmctx6BcXTV9E0ebf8a0sENhSDm 4 | ThjFM62baNka23Pzo6cSSSGbT2m1NQbARKa4dNP7zkWIPHa2tuK1/jRCy6Z/ARTd 5 | kPgYa4Xr0br/vL3QoZ/sy2ieeT2U4Xa03jAghU9VgFYkIp3hpI6aTaDmKG8Z5mVj 6 | ovBpW8Rg0vkkwZ3GhjhAJhr6qwMoTSgkQU/Xst0X8duO/HD7bH9NYpsySMiU4+lR 7 | srCC0rhiCToT36kidynajEJI6uQoTQzsPtFM+Nz0Vd1+dZfJ1H+ZyIROyVXlCKhR 8 | CQIDAQAB 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /test/data/rsa_keys/publickey2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzA/Ws3OvcV0uUN+2Q+wH 3 | 4JLGwD/gabXz7OYyw49it1bgvUMlmKssxmaWYEQ3h/uGAtKn5pahKK3a/P3e8LrD 4 | fwl8On5WWgSlvF3WwbtXrJDT9UYTjjakjBxngooE9gh9BJdkb/kLUL0MulERsUf5 5 | 
oGBPnWK4tyr3TXCTcBis9dnU09Q71QGAIgYe/eaXda2xqOUZLkAfewTQ2AqGL1SK 6 | lOl5Xpk9zsBgMseYuoEAe92w9YNn0g41wRukPvCt/z6/J9b26x/+DF2Du4PpeZeX 7 | 1Qij5VgbHmiUut9QIiymV+bSC0+yKfe5Lwt8QR4oyuEVmbHe6bHUiVtWiahiU8sR 8 | JwIDAQAB 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /test/data/rsa_keys/rsa_key_encrypted.p8: -------------------------------------------------------------------------------- 1 | -----BEGIN ENCRYPTED PRIVATE KEY----- 2 | MIIE6TAbBgkqhkiG9w0BBQMwDgQI6xVPyzHaGbYCAggABIIEyJ40XLJ4ifNL5iu0 3 | dYv0ksu7JtO8tBl/02DNED0P8T/mF8miJrfWq7C++4bGgRnJgeo/g4REi7GifLwg 4 | kRiNGibwfGVXhvJYiHDUCuGlhTe+haNEOO3RfNc6I0YL0uDrgIxlu6sGu9QGvdfr 5 | UOD+zRWlhJKIxTuXLRQb7sXQUAr1Xjkg+mSCaG4jnit37LSaWxUYu7FZYYpONKYG 6 | iwErlzLhSfNiIRkaWwd5SDeXOMSsVrSbLm/4PtQoB5tCZMYNRQXySl7Sn35R0xiI 7 | n2wzOhUkjeaXUjjmVYoRImKrq5bShMYR0xJHSj5jNggfDqATYj7Y7sa5ZEmSR+T1 8 | OCdV6ARKnwRnHmuTgNvuCqsT1qipxGdhtxSlO7RyDJIPC32jJWKhH8pDrwTVqZrb 9 | ytsDOjravQclf5l2rmBsChI0oNiE3ZmhF8O05T+5CUkcpkKTwQssH4zp0ctUfYCC 10 | 9lrSOLEzIKePuKvKsU1wNKZbbJlYN7svlDW0LwISc9VS9cwKs2s8yNTqTykg1CSs 11 | eGTZBtoG9yPatLl4CoSe1O1CQ09eVX+03OPj7wFGIuw8S/odXaVhlziIcBeL0MCv 12 | hTHMyMJiyI3A57n+Q/644Q8Hw6g29dGoLtR+rACfNoeE+cBgs0Iq0FMkXEQe5Ae5 13 | QxvE0gpgH/KFUiuA8RjIC4wtkjBPCFNA8lsVIgGNEbCF7QmMB7exg1Orm4FxjYRv 14 | M4KAQLjZEeGHaLp+3zXtUazl+34GWuaQlfmvjgg0MOL8R5mKIAKU4yeze2twQ2CZ 15 | yrqZgZOsbXsBcAxe6/5x3NqHKJ2ZN/kqHFnRHIIX1R8U9TE9zmfJ3THYjH+pYS9P 16 | 4CPhfanfNbQxQCJIctR/dvUiyIkk+fji3peidlKVgcYFFs9XB5CInzx4o3hOt6n5 17 | R5vVsFgUSesqzfBm1JKn8Pkl5hXvhTHJx0QL9CoRJ+M1MMIUN94vaDywjX/RTJp+ 18 | syaSW6CMmQkGhhfZKv7P9egHwyvUvsWQKvATLFlG7pJcEhIrnY7zDWUXvlHWAV+b 19 | 4ohN9B1ds2BR+rV9WcikEZVBHZLK8HxkmOmyppWZJd3V8kHRNGKClZgF0+0TTcyl 20 | nQQl8U76vUtQx9gx2bbB8jZ3AWxU3mknnoG8cnBVH67XA4jnusWZDSrydoBiFw1l 21 | wSDsFmYIlxYdg+KsAm2bciYZu/QUkk6EBw38zOgsKG++QB9C6SGVer98Lehb9I82 22 | PD9lp5ca7Q0iCd8ynMTUY47nabLvYydQ3iv6vi00m83tu4gBjEvIceg1cqzNz/Tv 23 | ISL+rpqJl+zm+aCob3fQIrm/MgFZTbP5EKDIE1UDOyS94v1H+L9H+Jr2VNLVgC6B 24 | SS9W7+gz0CjhKgC1N2uxcw1dZ2fRUarmPHBgCizJWIkxHMWOtQnOlvltIWI9eeZU 25 | f3SYrEPaTF0cRpRZGFXf9JDiDTYVKFh+RO5y2QmZszxZTIpQoI0lN8J6mpUqQLlJ 26 | agdfrPiG29Kv1NR5Os++Z/5J9JncNF/BdAoALqQXynwAl0GWpNNlV8TYyP1YuOvP 27 | Q0DvGtuiXBOxZj4u55ba5kM0QE5X26Z5LwXEeZBFvM35wwlLh20rAcPJnaL+aqrP 28 | zTOyyzibyoLzbvnk7A== 29 | -----END ENCRYPTED PRIVATE KEY----- 30 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/authorization_code/browser_timeout_authorization_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Browser Authorization timeout", 5 | "request": { 6 | "urlPathPattern": "/oauth/authorize.*", 7 | "method": "GET" 8 | }, 9 | "response": { 10 | "status": 200, 11 | "fixedDelayMilliseconds": 5000 12 | } 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/authorization_code/invalid_scope_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Invalid scope authorization error", 5 | "request": { 6 | "urlPathPattern": "/oauth/authorize.*", 7 | "method": "GET" 8 | }, 9 | "response": { 10 | "status": 302, 11 | "headers": { 12 | "Location": 
"http://localhost:8009/snowflake/oauth-redirect?error=invalid_scope&error_description=One+or+more+scopes+are+not+configured+for+the+authorization+server+resource." 13 | } 14 | } 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/authorization_code/invalid_state_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Invalid scope authorization error", 5 | "request": { 6 | "urlPathPattern": "/oauth/authorize.*", 7 | "method": "GET" 8 | }, 9 | "response": { 10 | "status": 302, 11 | "headers": { 12 | "Location": "http://localhost:8009/snowflake/oauth-redirect?code=123&state=invalidstate" 13 | } 14 | } 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/authorization_code/new_tokens_after_failed_refresh.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Authorized", 3 | "newScenarioState": "Acquired access token", 4 | "request": { 5 | "urlPathPattern": "/oauth/token-request.*", 6 | "method": "POST", 7 | "headers": { 8 | "Authorization": { 9 | "contains": "Basic" 10 | }, 11 | "Content-Type": { 12 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 13 | } 14 | }, 15 | "bodyPatterns": [ 16 | { 17 | "matches": "^grant_type=authorization_code&code=123&redirect_uri=http%3A%2F%2Flocalhost%3A([0-9]+)%2Fsnowflake%2Foauth-redirect&code_verifier=abc123$" 18 | } 19 | ] 20 | }, 21 | "response": { 22 | "status": 200, 23 | "jsonBody": { 24 | "access_token": "access-token-123", 25 | "refresh_token": "refresh-token-123", 26 | "token_type": "Bearer", 27 | "username": "user", 28 | "scope": "refresh_token session:role:ANALYST", 29 | "expires_in": 600, 30 | "refresh_token_expires_in": 86399, 31 | "idpInitiated": false 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/authorization_code/successful_auth_after_failed_refresh.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Failed refresh token attempt", 3 | "newScenarioState": "Authorized", 4 | "request": { 5 | "urlPathPattern": "/oauth/authorize", 6 | "queryParameters": { 7 | "response_type": { 8 | "equalTo": "code" 9 | }, 10 | "scope": { 11 | "equalTo": "session:role:ANALYST offline_access" 12 | }, 13 | "code_challenge_method": { 14 | "equalTo": "S256" 15 | }, 16 | "redirect_uri": { 17 | "equalTo": "http://localhost:8009/snowflake/oauth-redirect" 18 | }, 19 | "code_challenge": { 20 | "matches": ".*" 21 | }, 22 | "state": { 23 | "matches": ".*" 24 | }, 25 | "client_id": { 26 | "equalTo": "123" 27 | } 28 | }, 29 | "method": "GET" 30 | }, 31 | "response": { 32 | "status": 302, 33 | "headers": { 34 | "Location": "http://localhost:8009/snowflake/oauth-redirect?code=123&state=abc123" 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/client_credentials/successful_auth_after_failed_refresh.json: -------------------------------------------------------------------------------- 1 | { 2 | "scenarioName": "Successful OAuth client credentials flow", 3 | "requiredScenarioState": "Started", 4 | "newScenarioState": "Acquired access token", 5 | "request": { 6 | 
"urlPathPattern": "/oauth/token-request.*", 7 | "method": "POST", 8 | "headers": { 9 | "Authorization": { 10 | "contains": "Basic" 11 | }, 12 | "Content-Type": { 13 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 14 | } 15 | }, 16 | "bodyPatterns": [ 17 | { 18 | "contains": "grant_type=client_credentials&scope=session%3Arole%3AANALYST" 19 | } 20 | ] 21 | }, 22 | "response": { 23 | "status": 200, 24 | "jsonBody": { 25 | "access_token": "access-token-123", 26 | "refresh_token": "refresh-token-123", 27 | "token_type": "Bearer", 28 | "username": "user", 29 | "scope": "refresh_token session:role:ANALYST", 30 | "expires_in": 600, 31 | "refresh_token_expires_in": 86399, 32 | "idpInitiated": false 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/client_credentials/successful_flow.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Successful OAuth client credentials flow", 5 | "requiredScenarioState": "Started", 6 | "newScenarioState": "Acquired access token", 7 | "request": { 8 | "urlPathPattern": "/oauth/token-request.*", 9 | "method": "POST", 10 | "headers": { 11 | "Authorization": { 12 | "contains": "Basic" 13 | }, 14 | "Content-Type": { 15 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 16 | } 17 | }, 18 | "bodyPatterns": [ 19 | { 20 | "contains": "grant_type=client_credentials&scope=session%3Arole%3AANALYST" 21 | } 22 | ] 23 | }, 24 | "response": { 25 | "status": 200, 26 | "jsonBody": { 27 | "access_token": "access-token-123", 28 | "refresh_token": "123", 29 | "token_type": "Bearer", 30 | "username": "user", 31 | "scope": "refresh_token session:role:ANALYST", 32 | "expires_in": 600, 33 | "refresh_token_expires_in": 86399, 34 | "idpInitiated": false 35 | } 36 | } 37 | } 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/client_credentials/token_request_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "OAuth client credentials flow with token request error", 5 | "requiredScenarioState": "Started", 6 | "newScenarioState": "Acquired access token", 7 | "request": { 8 | "urlPathPattern": "/oauth/token-request.*", 9 | "method": "POST", 10 | "headers": { 11 | "Authorization": { 12 | "contains": "Basic" 13 | }, 14 | "Content-Type": { 15 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 16 | } 17 | }, 18 | "bodyPatterns": [ 19 | { 20 | "contains": "grant_type=client_credentials&scope=session%3Arole%3AANALYST" 21 | } 22 | ] 23 | }, 24 | "response": { 25 | "status": 400 26 | } 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/refresh_token/refresh_failed.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Expired access token", 3 | "newScenarioState": "Failed refresh token attempt", 4 | "request": { 5 | "urlPathPattern": "/oauth/token-request.*", 6 | "method": "POST", 7 | "headers": { 8 | "Authorization": { 9 | "contains": "Basic" 10 | }, 11 | "Content-Type": { 12 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 13 | } 14 | }, 15 | "bodyPatterns": [ 16 | { 17 | "contains": 
"grant_type=refresh_token&refresh_token=expired-refresh-token-123&scope=session%3Arole%3AANALYST+offline_access" 18 | } 19 | ] 20 | }, 21 | "response": { 22 | "status": 400, 23 | "jsonBody": { 24 | "error": "invalid_grant", 25 | "error_description": "Unknown or invalid refresh token." 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/oauth/refresh_token/refresh_successful.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Expired access token", 3 | "newScenarioState": "Acquired access token", 4 | "request": { 5 | "urlPathPattern": "/oauth/token-request.*", 6 | "method": "POST", 7 | "headers": { 8 | "Authorization": { 9 | "contains": "Basic" 10 | }, 11 | "Content-Type": { 12 | "contains": "application/x-www-form-urlencoded; charset=UTF-8" 13 | } 14 | }, 15 | "bodyPatterns": [ 16 | { 17 | "contains": "grant_type=refresh_token&refresh_token=refresh-token-123&scope=session%3Arole%3AANALYST+offline_access" 18 | } 19 | ] 20 | }, 21 | "response": { 22 | "status": 200, 23 | "jsonBody": { 24 | "access_token": "access-token-123", 25 | "token_type": "Bearer", 26 | "expires_in": 599, 27 | "idpInitiated": false 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/auth/pat/invalid_token.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Invalid PAT authentication flow", 5 | "requiredScenarioState": "Started", 6 | "newScenarioState": "Authentication failed", 7 | "request": { 8 | "urlPathPattern": "/session/v1/login-request.*", 9 | "method": "POST", 10 | "bodyPatterns": [ 11 | { 12 | "equalToJson" : { 13 | "data": { 14 | "AUTHENTICATOR": "PROGRAMMATIC_ACCESS_TOKEN", 15 | "TOKEN": "some PAT" 16 | } 17 | }, 18 | "ignoreExtraElements" : true 19 | } 20 | ] 21 | }, 22 | "response": { 23 | "status": 200, 24 | "jsonBody": { 25 | "data": { 26 | "nextAction": "RETRY_LOGIN", 27 | "authnMethod": "PAT", 28 | "signInOptions": {} 29 | }, 30 | "code": "394400", 31 | "message": "Programmatic access token is invalid.", 32 | "success": false, 33 | "headers": null 34 | } 35 | } 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/generic/snowflake_disconnect_successful.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Connected", 3 | "newScenarioState": "Disconnected", 4 | "request": { 5 | "urlPathPattern": "/session", 6 | "method": "POST", 7 | "queryParameters": { 8 | "delete": { 9 | "matches": "true" 10 | } 11 | } 12 | }, 13 | "response": { 14 | "status": 200, 15 | "jsonBody": { 16 | "code": 200, 17 | "message": "done", 18 | "success": true 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/generic/snowflake_login_failed.json: -------------------------------------------------------------------------------- 1 | { 2 | "mappings": [ 3 | { 4 | "scenarioName": "Refresh expired access token", 5 | "requiredScenarioState": "Started", 6 | "newScenarioState": "Expired access token", 7 | "request": { 8 | "urlPathPattern": "/session/v1/login-request", 9 | "method": "POST", 10 | "queryParameters": { 11 | "request_id": { 12 | "matches": ".*" 13 | }, 14 | "roleName": { 15 | 
"equalTo": "ANALYST" 16 | } 17 | }, 18 | "headers": { 19 | "Content-Type": { 20 | "contains": "application/json" 21 | } 22 | }, 23 | "bodyPatterns": [ 24 | { 25 | "matchesJsonPath": "$.data" 26 | }, 27 | { 28 | "matchesJsonPath": "$[?(@.data.TOKEN==\"expired-access-token-123\")]" 29 | } 30 | ] 31 | }, 32 | "response": { 33 | "status": 200, 34 | "jsonBody": { 35 | "data": { 36 | "nextAction": "RETRY_LOGIN", 37 | "authnMethod": "OAUTH", 38 | "signInOptions": {} 39 | }, 40 | "code": "390318", 41 | "message": "OAuth access token expired. [1172527951366]", 42 | "success": false, 43 | "headers": null 44 | } 45 | } 46 | } 47 | ] 48 | } 49 | -------------------------------------------------------------------------------- /test/data/wiremock/mappings/generic/snowflake_login_successful.json: -------------------------------------------------------------------------------- 1 | { 2 | "requiredScenarioState": "Acquired access token", 3 | "newScenarioState": "Connected", 4 | "request": { 5 | "urlPathPattern": "/session/v1/login-request", 6 | "method": "POST", 7 | "queryParameters": { 8 | "request_id": { 9 | "matches": ".*" 10 | }, 11 | "roleName": { 12 | "equalTo": "ANALYST" 13 | } 14 | }, 15 | "headers": { 16 | "Content-Type": { 17 | "contains": "application/json" 18 | } 19 | }, 20 | "bodyPatterns": [ 21 | { 22 | "matchesJsonPath": "$.data" 23 | }, 24 | { 25 | "matchesJsonPath": "$[?(@.data.TOKEN==\"access-token-123\")]" 26 | } 27 | ] 28 | }, 29 | "response": { 30 | "status": 200, 31 | "fixedDelayMilliseconds": "1000", 32 | "jsonBody": { 33 | "data": { 34 | "masterToken": "token-m1", 35 | "token": "token-t1", 36 | "validityInSeconds": 3599, 37 | "masterValidityInSeconds": 14400, 38 | "displayUserName": "***", 39 | "serverVersion": "***", 40 | "firstLogin": false, 41 | "remMeToken": null, 42 | "remMeValidityInSeconds": 0, 43 | "healthCheckInterval": 45, 44 | "newClientForUpgrade": null, 45 | "sessionId": 1313, 46 | "parameters": [], 47 | "sessionInfo": { 48 | "databaseName": null, 49 | "schemaName": null, 50 | "warehouseName": "TEST", 51 | "roleName": "ACCOUNTADMIN" 52 | }, 53 | "idToken": null, 54 | "idTokenValidityInSeconds": 0, 55 | "responseData": null, 56 | "mfaToken": null, 57 | "mfaTokenValidityInSeconds": 0 58 | }, 59 | "code": null, 60 | "message": null, 61 | "success": true 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /test/data/zstd_sample.txt.zst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/data/zstd_sample.txt.zst -------------------------------------------------------------------------------- /test/extras/README.md: -------------------------------------------------------------------------------- 1 | # Extra tests that should run separately 2 | 3 | ## Running tests 4 | 5 | These are tests that test weird edge cases when we need a standalone Python environment 6 | and process. 7 | 8 | Run only these tests with `tox`, for example: `tox -e py38-extras` from the 9 | top directory. 
10 | -------------------------------------------------------------------------------- /test/extras/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/extras/__init__.py -------------------------------------------------------------------------------- /test/extras/run.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | import platform 4 | import subprocess 5 | import sys 6 | 7 | import snowflake.connector.ocsp_snowflake 8 | 9 | # This script runs every Python file in this directory other than this 10 | # one in a subprocess and checks their exit codes 11 | 12 | 13 | file_ignore_list = ["run.py", "__init__.py"] 14 | 15 | for test_file in pathlib.Path(__file__).parent.glob("*.py"): 16 | if test_file.name not in file_ignore_list: 17 | print(f"Running {test_file}") 18 | sub_process = subprocess.run( 19 | [ 20 | sys.executable if sys.executable else "python", 21 | "-m", 22 | f"test.extras.{test_file.name[:-3]}", 23 | ] 24 | ) 25 | sub_process.check_returncode() 26 | ocsp_cache_dir_path = pathlib.Path( 27 | snowflake.connector.ocsp_snowflake.OCSP_RESPONSE_VALIDATION_CACHE.file_path 28 | ).parent 29 | cache_files = set(os.listdir(ocsp_cache_dir_path)) 30 | # This is to test SNOW-79940, making sure tmp files are removed 31 | # Windows does not have ocsp_response_validation_cache.json.lock 32 | assert ( 33 | cache_files 34 | == { 35 | "ocsp_response_validation_cache.json.lock", 36 | "ocsp_response_validation_cache.json", 37 | "ocsp_response_cache.json", 38 | } 39 | and not platform.system() == "Windows" 40 | ) or ( 41 | cache_files 42 | == { 43 | "ocsp_response_validation_cache.json", 44 | "ocsp_response_cache.json", 45 | } 46 | and platform.system() == "Windows" 47 | ), str( 48 | cache_files 49 | ) 50 | -------------------------------------------------------------------------------- /test/extras/simple_select1.py: -------------------------------------------------------------------------------- 1 | from snowflake.connector import connect 2 | 3 | from ..parameters import CONNECTION_PARAMETERS 4 | 5 | with connect(**CONNECTION_PARAMETERS) as conn: 6 | with conn.cursor() as cur: 7 | assert cur.execute("select 1;").fetchall() == [ 8 | (1,), 9 | ] 10 | -------------------------------------------------------------------------------- /test/integ/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/integ/__init__.py -------------------------------------------------------------------------------- /test/integ/lambda/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/integ/lambda/__init__.py -------------------------------------------------------------------------------- /test/integ/lambda/test_basic_query.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | 4 | def test_connection(conn_cnx): 5 | """Test basic connection.""" 6 | with conn_cnx() as cnx: 7 | cur = cnx.cursor() 8 | result = cur.execute("select 1;").fetchall() 9 | assert result == [(1,)] 10 | 11 | 12 | def test_large_resultset(conn_cnx): 13 |
"""Test large resultset.""" 14 | with conn_cnx() as cnx: 15 | cur = cnx.cursor() 16 | result = cur.execute( 17 | "select seq8(), randstr(1000, random()) from table(generator(rowcount=>10000));" 18 | ).fetchall() 19 | assert len(result) == 10000 20 | -------------------------------------------------------------------------------- /test/integ/pandas/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/integ/pandas/__init__.py -------------------------------------------------------------------------------- /test/integ/pandas/test_error_arrow_pandas_stream.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from ...helpers import ( 4 | _arrow_error_stream_chunk_remove_random_length_bytes_test, 5 | _arrow_error_stream_chunk_remove_single_byte_test, 6 | _arrow_error_stream_random_input_test, 7 | ) 8 | 9 | pytestmark = pytest.mark.skipolddriver 10 | 11 | 12 | def test_connector_error_base64_stream_chunk_remove_single_byte(): 13 | _arrow_error_stream_chunk_remove_single_byte_test(use_table_iterator=True) 14 | 15 | 16 | def test_connector_error_base64_stream_chunk_remove_random_length_bytes(): 17 | _arrow_error_stream_chunk_remove_random_length_bytes_test(use_table_iterator=True) 18 | 19 | 20 | def test_connector_error_random_input(): 21 | _arrow_error_stream_random_input_test(use_table_iterator=True) 22 | -------------------------------------------------------------------------------- /test/integ/pandas/test_logging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import logging 5 | 6 | 7 | def test_rand_table_log(caplog, conn_cnx, db_parameters): 8 | with conn_cnx() as conn: 9 | caplog.set_level(logging.DEBUG, "snowflake.connector") 10 | 11 | num_of_rows = 10 12 | with conn.cursor() as cur: 13 | cur.execute( 14 | "select randstr(abs(mod(random(), 100)), random()) from table(generator(rowcount => {}));".format( 15 | num_of_rows 16 | ) 17 | ).fetchall() 18 | 19 | # make assertions 20 | has_batch_read = has_batch_size = has_chunk_info = has_batch_index = False 21 | for record in caplog.records: 22 | if "Batches read:" in record.msg: 23 | has_batch_read = True 24 | assert "arrow_iterator" in record.filename 25 | assert "__cinit__" in record.funcName 26 | 27 | if "Arrow BatchSize:" in record.msg: 28 | has_batch_size = True 29 | assert "CArrowIterator.cpp" in record.filename 30 | assert "CArrowIterator" in record.funcName 31 | 32 | if "Arrow chunk info:" in record.msg: 33 | has_chunk_info = True 34 | assert "CArrowChunkIterator.cpp" in record.filename 35 | assert "CArrowChunkIterator" in record.funcName 36 | 37 | if "Current batch index:" in record.msg: 38 | has_batch_index = True 39 | assert "CArrowChunkIterator.cpp" in record.filename 40 | assert "next" in record.funcName 41 | 42 | # each of these records appear at least once in records 43 | assert has_batch_read and has_batch_size and has_chunk_info and has_batch_index 44 | -------------------------------------------------------------------------------- /test/integ/pandas/test_unit_options.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | from unittest import mock 5 | 6 | import pytest 7 | 8 | try: 9 | from 
snowflake.connector.options import ( 10 | MissingPandas, 11 | _import_or_missing_pandas_option, 12 | ) 13 | except ImportError: 14 | MissingPandas = None 15 | _import_or_missing_pandas_option = None 16 | 17 | from importlib.metadata import PackageNotFoundError, distribution 18 | 19 | 20 | @pytest.mark.skipif( 21 | MissingPandas is None or _import_or_missing_pandas_option is None, 22 | reason="No snowflake.connector.options is available. It can be the case if running old driver tests", 23 | ) 24 | def test_pandas_option_reporting(caplog): 25 | """Tests for the weird case where someone can import pyarrow, but setuptools doesn't know about it. 26 | 27 | This issue was brought to attention in: https://github.com/snowflakedb/snowflake-connector-python/issues/412 28 | """ 29 | 30 | def modified_distribution(name, *args, **kwargs): 31 | if name in ["pyarrow", "snowflake-connector-python"]: 32 | raise PackageNotFoundError("TestErrorMessage") 33 | return distribution(name, *args, **kwargs) 34 | 35 | with mock.patch( 36 | "snowflake.connector.options.distribution", 37 | wraps=modified_distribution, 38 | ): 39 | caplog.set_level(logging.DEBUG, "snowflake.connector") 40 | pandas, pyarrow, installed_pandas = _import_or_missing_pandas_option() 41 | assert installed_pandas 42 | assert not isinstance(pandas, MissingPandas) 43 | assert not isinstance(pyarrow, MissingPandas) 44 | assert ( 45 | "Cannot determine if compatible pyarrow is installed because of missing package(s)" 46 | in caplog.text 47 | ) 48 | assert "TestErrorMessage" in caplog.text 49 | -------------------------------------------------------------------------------- /test/integ/sso/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/integ/sso/__init__.py -------------------------------------------------------------------------------- /test/integ/test_cursor_context_manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from logging import getLogger 5 | 6 | 7 | def test_context_manager(conn_testaccount, db_parameters): 8 | """Tests context Manager support in Cursor.""" 9 | logger = getLogger(__name__) 10 | 11 | def tables(conn): 12 | with conn.cursor() as cur: 13 | cur.execute("show tables") 14 | name_to_idx = {elem[0]: idx for idx, elem in enumerate(cur.description)} 15 | for row in cur: 16 | yield row[name_to_idx["name"]] 17 | 18 | try: 19 | conn_testaccount.cursor().execute( 20 | "create or replace table {} (a int)".format(db_parameters["name"]) 21 | ) 22 | all_tables = [ 23 | rec 24 | for rec in tables(conn_testaccount) 25 | if rec == db_parameters["name"].upper() 26 | ] 27 | logger.info("tables: %s", all_tables) 28 | assert len(all_tables) == 1, "number of tables" 29 | finally: 30 | conn_testaccount.cursor().execute( 31 | "drop table if exists {}".format(db_parameters["name"]) 32 | ) 33 | -------------------------------------------------------------------------------- /test/integ/test_pickle_timestamp_tz.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import os 5 | import pickle 6 | 7 | 8 | def test_pickle_timestamp_tz(tmpdir, conn_cnx): 9 | """Ensures the timestamp_tz result is pickle-able.""" 10 | tmp_dir = str(tmpdir.mkdir("pickles")) 11 | output = 
os.path.join(tmp_dir, "tz.pickle") 12 | expected_tz = None 13 | with conn_cnx() as con: 14 | for rec in con.cursor().execute( 15 | "select '2019-08-11 01:02:03.123 -03:00'::TIMESTAMP_TZ" 16 | ): 17 | expected_tz = rec[0] 18 | with open(output, "wb") as f: 19 | pickle.dump(expected_tz, f) 20 | 21 | with open(output, "rb") as f: 22 | read_tz = pickle.load(f) 23 | assert expected_tz == read_tz 24 | -------------------------------------------------------------------------------- /test/integ/test_put_windows_path.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import os 5 | 6 | 7 | def test_abc(conn_cnx, tmpdir, db_parameters): 8 | """Tests PUTing a file on Windows using the URI and Windows path.""" 9 | import pathlib 10 | 11 | tmp_dir = str(tmpdir.mkdir("data")) 12 | test_data = os.path.join(tmp_dir, "data.txt") 13 | with open(test_data, "w") as f: 14 | f.write("test1,test2") 15 | f.write("test3,test4") 16 | 17 | fileURI = pathlib.Path(test_data).as_uri() 18 | 19 | subdir = db_parameters["name"] 20 | with conn_cnx( 21 | user=db_parameters["user"], 22 | account=db_parameters["account"], 23 | password=db_parameters["password"], 24 | ) as con: 25 | rec = con.cursor().execute(f"put {fileURI} @~/{subdir}0/").fetchall() 26 | assert rec[0][6] == "UPLOADED" 27 | 28 | rec = con.cursor().execute(f"put file://{test_data} @~/{subdir}1/").fetchall() 29 | assert rec[0][6] == "UPLOADED" 30 | 31 | con.cursor().execute(f"rm @~/{subdir}0") 32 | con.cursor().execute(f"rm @~/{subdir}1") 33 | -------------------------------------------------------------------------------- /test/integ/test_results.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import pytest 5 | 6 | from snowflake.connector import ProgrammingError 7 | 8 | 9 | def test_results(conn_cnx): 10 | """Gets results for the given qid.""" 11 | with conn_cnx() as cnx: 12 | cur = cnx.cursor() 13 | cur.execute("select * from values(1,2),(3,4)") 14 | sfqid = cur.sfqid 15 | cur = cur.query_result(sfqid) 16 | got_sfqid = cur.sfqid 17 | assert cur.fetchall() == [(1, 2), (3, 4)] 18 | assert sfqid == got_sfqid 19 | 20 | 21 | def test_results_with_error(conn_cnx): 22 | """Gets results with error.""" 23 | with conn_cnx() as cnx: 24 | cur = cnx.cursor() 25 | with pytest.raises(ProgrammingError) as e: 26 | cur.execute("select blah") 27 | sfqid = e.value.sfqid 28 | 29 | with pytest.raises(ProgrammingError) as e: 30 | cur.query_result(sfqid) 31 | got_sfqid = e.value.sfqid 32 | 33 | assert sfqid is not None 34 | assert got_sfqid is not None 35 | assert got_sfqid == sfqid 36 | -------------------------------------------------------------------------------- /test/integ/test_reuse_cursor.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | def test_reuse_cursor(conn_cnx, db_parameters): 3 | """Ensures only the last executed command/query's result sets are returned.""" 4 | with conn_cnx() as cnx: 5 | c = cnx.cursor() 6 | c.execute( 7 | "create or replace table {name}(c1 string)".format( 8 | name=db_parameters["name"] 9 | ) 10 | ) 11 | try: 12 | c.execute( 13 | "insert into {name} values('123'),('456'),('678')".format( 14 | name=db_parameters["name"] 15 | ) 16 | ) 17 | c.execute("show tables") 18 | c.execute("select current_date()") 19 | rec = c.fetchone() 20 | assert len(rec) == 1, "number of records 
is wrong" 21 | c.execute( 22 | "select * from {name} order by 1".format(name=db_parameters["name"]) 23 | ) 24 | recs = c.fetchall() 25 | assert c.description[0][0] == "C1", "fisrt column name" 26 | assert len(recs) == 3, "number of records is wrong" 27 | finally: 28 | c.execute("drop table if exists {name}".format(name=db_parameters["name"])) 29 | -------------------------------------------------------------------------------- /test/integ/test_statement_parameter_binding.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from datetime import datetime 5 | 6 | import pytest 7 | import pytz 8 | 9 | try: 10 | from parameters import CONNECTION_PARAMETERS_ADMIN 11 | except ImportError: 12 | CONNECTION_PARAMETERS_ADMIN = {} 13 | 14 | 15 | @pytest.mark.skipif( 16 | not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 17 | ) 18 | def test_binding_security(conn_cnx): 19 | """Tests binding statement parameters.""" 20 | expected_qa_mode_datetime = datetime(1967, 6, 23, 7, 0, 0, 123000, pytz.UTC) 21 | 22 | with conn_cnx() as cnx: 23 | cnx.cursor().execute("alter session set timezone='UTC'") 24 | with cnx.cursor() as cur: 25 | cur.execute("show databases like 'TESTDB'") 26 | rec = cur.fetchone() 27 | assert rec[0] != expected_qa_mode_datetime 28 | 29 | with cnx.cursor() as cur: 30 | cur.execute( 31 | "show databases like 'TESTDB'", 32 | _statement_params={ 33 | "QA_MODE": True, 34 | }, 35 | ) 36 | rec = cur.fetchone() 37 | assert rec[0] == expected_qa_mode_datetime 38 | 39 | with cnx.cursor() as cur: 40 | cur.execute("show databases like 'TESTDB'") 41 | rec = cur.fetchone() 42 | assert rec[0] != expected_qa_mode_datetime 43 | -------------------------------------------------------------------------------- /test/integ/test_vendored_urllib.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pytest 3 | 4 | try: 5 | from snowflake.connector.vendored import urllib3 6 | 7 | vendored_imported = True 8 | except ModuleNotFoundError: 9 | vendored_imported = False 10 | 11 | 12 | @pytest.mark.skipolddriver(reason="vendored library is not imported for old driver") 13 | def test_local_fix_for_closed_socket_bug(): 14 | # https://github.com/urllib3/urllib3/issues/1878#issuecomment-641534573 15 | http = urllib3.PoolManager(maxsize=1) 16 | 17 | def _execute_request(): 18 | resp = http.request( 19 | method="GET", url="http://httpbin.org", preload_content=False 20 | ) 21 | resp._connection.sock.close() 22 | resp.release_conn() 23 | resp.close() 24 | return resp 25 | 26 | _execute_request() 27 | try: 28 | _execute_request() 29 | except ValueError as e: 30 | if "file descriptor cannot be a negative" in str(e): 31 | raise AssertionError( 32 | "Second _execute_request failed. 
See linked github issue comment" 33 | ) 34 | else: 35 | raise e 36 | -------------------------------------------------------------------------------- /test/integ_helpers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | import os 5 | from typing import TYPE_CHECKING 6 | 7 | if TYPE_CHECKING: # pragma: no cover 8 | from snowflake.connector.cursor import SnowflakeCursor 9 | 10 | 11 | def put( 12 | csr: SnowflakeCursor, 13 | file_path: str, 14 | stage_path: str, 15 | from_path: bool, 16 | sql_options: str | None = "", 17 | **kwargs, 18 | ) -> SnowflakeCursor: 19 | """Execute PUT query with given cursor. 20 | 21 | Args: 22 | csr: Snowflake cursor object. 23 | file_path: Path to the target file in local system; Or . when from_path is False. 24 | stage_path: Destination path of file on the stage. 25 | from_path: Whether the target file is fetched with given path, specify file_stream= if False. 26 | sql_options: Optional arguments to the PUT command. 27 | **kwargs: Optional arguments passed to SnowflakeCursor.execute() 28 | 29 | Returns: 30 | A result class with the results in it. This can either be json, or an arrow result class. 31 | """ 32 | sql = "put 'file://{file}' @{stage} {sql_options}" 33 | if from_path: 34 | kwargs.pop("file_stream", None) 35 | else: 36 | # PUT from stream 37 | file_path = os.path.basename(file_path) 38 | if kwargs.pop("commented", False): 39 | sql = "--- test comments\n" + sql 40 | sql = sql.format( 41 | file=file_path.replace("\\", "\\\\"), stage=stage_path, sql_options=sql_options 42 | ) 43 | return csr.execute(sql, **kwargs) 44 | -------------------------------------------------------------------------------- /test/lazy_var.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Callable, Generic, TypeVar 4 | 5 | T = TypeVar("T") 6 | 7 | 8 | class LazyVar(Generic[T]): 9 | """Our implementation of a lazy variable. 10 | 11 | Mostly used for when we want to implement a shared variable between tests (should be calculated at most once), 12 | but only if necessary. 13 | """ 14 | 15 | def __init__(self, generator: Callable[[], T]): 16 | """Initializes a lazy variable. 17 | 18 | Args: 19 | generator: A function that takes no arguments and generates the actual variable. 20 | """ 21 | self.value = None 22 | self.generator = generator 23 | 24 | def get(self) -> T: 25 | if self.value is None: 26 | self.value = self.generator() 27 | return self.value 28 | -------------------------------------------------------------------------------- /test/randomize.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module was added back to the repository for compatibility with the old driver tests that 3 | rely on random_string from this file for functionality. 4 | """ 5 | 6 | from __future__ import annotations 7 | 8 | import random 9 | import string 10 | from typing import Sequence 11 | 12 | 13 | def random_string( 14 | length: int, 15 | prefix: str = "", 16 | suffix: str = "", 17 | choices: Sequence[str] = string.ascii_lowercase, 18 | ) -> str: 19 | """Our convenience function to generate random string for object names. 20 | Args: 21 | length: How many random characters to choose from choices. 22 | prefix: Prefix to add to random string generated. 23 | suffix: Suffix to add to random string generated. 
24 | choices: A sequence of characters to choose from. 25 | """ 26 | random_part = "".join([random.choice(choices) for _ in range(length)]) 27 | return "".join([prefix, random_part, suffix]) 28 | -------------------------------------------------------------------------------- /test/stress/README.md: -------------------------------------------------------------------------------- 1 | ## Quick start for performance testing 2 | 3 | 4 | ### Setup 5 | 6 | Note: you need to put your own credentials into parameters.py (a hedged sketch of this file appears further below) 7 | 8 | ```bash 9 | git clone git@github.com:snowflakedb/snowflake-connector-python.git 10 | cd snowflake-connector-python/test/stress 11 | pip install -r dev_requirements.txt 12 | touch parameters.py # set your own connection parameters 13 | ``` 14 | 15 | ### Run unit perf test 16 | 17 | This test uses the test dataset stored in the "stress_test_data" folder. 18 | Check the README in that folder to see which datasets are available. 19 | 20 | ```bash 21 | python local_iterator.py 22 | ``` 23 | 24 | ### Run e2e perf test 25 | 26 | This test runs queries against Snowflake. Update the script to prepare the data and run the test. 27 | 28 | ```bash 29 | python e2e_iterator.py 30 | ``` 31 | -------------------------------------------------------------------------------- /test/stress/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/stress/__init__.py -------------------------------------------------------------------------------- /test/stress/dev_requirements.txt: -------------------------------------------------------------------------------- 1 | psutil 2 | ../.. 3 | matplotlib 4 | -------------------------------------------------------------------------------- /test/stress/stress_test_data/README.md: -------------------------------------------------------------------------------- 1 | # Test Data Description 2 | 3 | ## test_data_all_types 4 | 5 | This dataset contains 1 batch, 100 rows of data, and the schema of the data contains 27 columns. 6 | 7 | ### sample row data: 8 | 9 | ```python 10 | ( 11 | 123456, 12 | bytearray(b'HELP'), 13 | True, 14 | 'a', 15 | 'b', 16 | datetime.date(2023, 7, 18), 17 | datetime.datetime(2023, 7, 18, 12, 51), 18 | Decimal('984.280'), 19 | Decimal('268.350'), 20 | 123.456, 21 | 738.132, 22 | 6789, 23 | 23456, 24 | 12583, 25 | 513.431, 26 | 10, 27 | 9, 28 | 'fjisfsj', 29 | 'wkdoajde131', 30 | datetime.time(12, 34, 56), 31 | datetime.datetime(2021, 1, 1, 0, 0), 32 | datetime.datetime(2021, 1, 1, 0, 0, tzinfo=), 33 | datetime.datetime(2020, 12, 31, 16, 0, tzinfo=), 34 | datetime.datetime(2021, 1, 1, 0, 0), 35 | 1, 36 | bytearray(b'HELP'), 37 | 'vxlmls!21321#@!#!' 38 | ) 39 | ``` 40 | 41 | ## test_multi_column_row_decimal_data 42 | 43 | This dataset contains 9 batches, each batch has approximately 1700 rows of data, and the schema of the data contains 19 columns.
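(Referenced from the stress README above.) That README asks for a `parameters.py` holding your credentials; a minimal sketch, assuming the stress scripts follow the same convention as `test/extras/simple_select1.py` and import a `CONNECTION_PARAMETERS` dict of keyword arguments for `snowflake.connector.connect` — every value below is a placeholder:

```python
# parameters.py -- hedged sketch; replace every placeholder with your own values.
CONNECTION_PARAMETERS = {
    "account": "<your_account>",
    "user": "<your_user>",
    "password": "<your_password>",
    "warehouse": "<your_warehouse>",
    "database": "<your_database>",
    "schema": "<your_schema>",
}
```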
44 | 45 | ### sample row data: 46 | ```python 47 | ( 48 | datetime.date(2021, 1, 3), 49 | 8371, 50 | 'segment_no_0', 51 | 1, 52 | 7, 53 | 2, 54 | Decimal('0.285714'), 55 | Decimal('1.000'), 56 | Decimal('7.000'), 57 | Decimal('2.000'), 58 | Decimal('0.285714000'), 59 | Decimal('1.000'), 60 | Decimal('7.000'), 61 | Decimal('2.000'), 62 | Decimal('0.285714000'), 63 | Decimal('1.000'), 64 | Decimal('7.000'), 65 | Decimal('2.000'), 66 | Decimal('0.285714000') 67 | ) 68 | ``` 69 | -------------------------------------------------------------------------------- /test/stress/stress_test_data/test_multi_column_row_decimal_data: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/stress/stress_test_data/test_multi_column_row_decimal_data -------------------------------------------------------------------------------- /test/stress/util.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import psutil 4 | 5 | process = psutil.Process() 6 | 7 | SAMPLE_RATE = 10 # record data every SAMPLE_RATE executions 8 | 9 | 10 | def task_execution_decorator(func, perf_file, memory_file): 11 | count = 0 12 | 13 | def wrapper(*args, **kwargs): 14 | start = time.time() 15 | func(*args, **kwargs) 16 | memory_usage = ( 17 | process.memory_info().rss / 1024 / 1024 18 | ) # rss is in bytes; convert to MB 19 | period = time.time() - start 20 | nonlocal count 21 | if count % SAMPLE_RATE == 0: 22 | perf_file.write(str(period) + "\n") 23 | print(f"execution count: {count}") 24 | print(f"memory usage: {memory_usage} MB") 25 | print(f"execution time: {period} s") 26 | memory_file.write(str(memory_usage) + "\n") 27 | count += 1 28 | 29 | return wrapper 30 | -------------------------------------------------------------------------------- /test/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/unit/__init__.py -------------------------------------------------------------------------------- /test/unit/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from snowflake.connector.telemetry_oob import TelemetryService 6 | 7 | from ..csp_helpers import ( 8 | FakeAwsEnvironment, 9 | FakeAzureFunctionMetadataService, 10 | FakeAzureVmMetadataService, 11 | FakeGceMetadataService, 12 | NoMetadataService, 13 | ) 14 | 15 | 16 | @pytest.fixture(autouse=True, scope="session") 17 | def disable_oob_telemetry(): 18 | oob_telemetry_service = TelemetryService.get_instance() 19 | original_state = oob_telemetry_service.enabled 20 | oob_telemetry_service.disable() 21 | yield None 22 | if original_state: 23 | oob_telemetry_service.enable() 24 | 25 | 26 | @pytest.fixture 27 | def no_metadata_service(): 28 | """Emulates an environment without any metadata service.""" 29 | with NoMetadataService() as server: 30 | yield server 31 | 32 | 33 | @pytest.fixture 34 | def fake_aws_environment(): 35 | """Emulates the AWS environment, returning dummy credentials.""" 36 | with FakeAwsEnvironment() as env: 37 | yield env 38 | 39 | 40 | @pytest.fixture( 41 | params=[FakeAzureFunctionMetadataService(), FakeAzureVmMetadataService()], 42 | ids=["azure_function", "azure_vm"], 43 | ) 44 | def
fake_azure_metadata_service(request): 45 | """Parameterized fixture that emulates both the Azure VM and Azure Functions metadata services.""" 46 | with request.param as server: 47 | yield server 48 | 49 | 50 | @pytest.fixture 51 | def fake_gce_metadata_service(): 52 | """Emulates the GCE metadata service, returning a dummy token.""" 53 | with FakeGceMetadataService() as server: 54 | yield server 55 | -------------------------------------------------------------------------------- /test/unit/mock_utils.py: -------------------------------------------------------------------------------- 1 | import time 2 | from unittest.mock import MagicMock 3 | 4 | try: 5 | from snowflake.connector.vendored.requests.exceptions import ConnectionError 6 | except ImportError: 7 | from requests.exceptions import ConnectionError 8 | 9 | try: 10 | from snowflake.connector.auth.by_plugin import DEFAULT_AUTH_CLASS_TIMEOUT 11 | except ImportError: 12 | DEFAULT_AUTH_CLASS_TIMEOUT = 120 13 | 14 | 15 | def zero_backoff(): 16 | while True: 17 | yield 0 18 | 19 | 20 | try: 21 | from snowflake.connector.connection import DEFAULT_BACKOFF_POLICY 22 | except ImportError: 23 | DEFAULT_BACKOFF_POLICY = zero_backoff 24 | 25 | 26 | def mock_connection( 27 | login_timeout=DEFAULT_AUTH_CLASS_TIMEOUT, 28 | network_timeout=None, 29 | socket_timeout=None, 30 | backoff_policy=DEFAULT_BACKOFF_POLICY, 31 | disable_saml_url_check=False, 32 | ): 33 | return MagicMock( 34 | _login_timeout=login_timeout, 35 | login_timeout=login_timeout, 36 | _network_timeout=network_timeout, 37 | network_timeout=network_timeout, 38 | _socket_timeout=socket_timeout, 39 | socket_timeout=socket_timeout, 40 | _backoff_policy=backoff_policy, 41 | backoff_policy=backoff_policy, 42 | _disable_saml_url_check=disable_saml_url_check, 43 | ) 44 | 45 | 46 | def mock_request_with_action(next_action, sleep=None): 47 | def mock_request(*args, **kwargs): 48 | if sleep is not None: 49 | time.sleep(sleep) 50 | if next_action == "RETRY": 51 | return MagicMock( 52 | status_code=503, 53 | close=lambda: None, 54 | ) 55 | elif next_action == "ERROR": 56 | raise ConnectionError() 57 | 58 | return mock_request 59 | -------------------------------------------------------------------------------- /test/unit/test_auth_mfa.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | from snowflake.connector import connect 4 | 5 | 6 | def test_mfa_token_cache(): 7 | with mock.patch( 8 | "snowflake.connector.network.SnowflakeRestful.fetch", 9 | ): 10 | with mock.patch( 11 | "snowflake.connector.auth._auth.Auth._write_temporary_credential", 12 | ) as save_mock: 13 | with connect( 14 | account="account", 15 | user="user", 16 | password="password", 17 | authenticator="username_password_mfa", 18 | client_store_temporary_credential=True, 19 | client_request_mfa_token=True, 20 | ): 21 | assert save_mock.called 22 | with mock.patch( 23 | "snowflake.connector.network.SnowflakeRestful.fetch", 24 | return_value={ 25 | "data": { 26 | "token": "abcd", 27 | "masterToken": "defg", 28 | }, 29 | "success": True, 30 | }, 31 | ): 32 | with mock.patch( 33 | "snowflake.connector.cursor.SnowflakeCursor._init_result_and_meta", 34 | ): 35 | with mock.patch( 36 | "snowflake.connector.auth._auth.Auth._read_temporary_credential", 37 | return_value=None, 38 | ) as load_mock: 39 | with connect( 40 | account="account", 41 | user="user", 42 | password="password", 43 | authenticator="username_password_mfa", 44 | client_store_temporary_credential=True, 45 | 
client_request_mfa_token=True, 46 | ): 47 | assert load_mock.called 48 | -------------------------------------------------------------------------------- /test/unit/test_auth_no_auth.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | @pytest.mark.skipolddriver 7 | def test_auth_no_auth(): 8 | """Simple test for AuthNoAuth.""" 9 | 10 | # AuthNoAuth does not exist in old drivers, so we import at test level to 11 | # skip importing it for old driver tests. 12 | from snowflake.connector.auth.no_auth import AuthNoAuth 13 | 14 | auth = AuthNoAuth() 15 | 16 | body = {"data": {}} 17 | old_body = body 18 | auth.update_body(body) 19 | # update_body should be no-op for SP auth, therefore the body content should remain the same. 20 | assert body == old_body, f"body is {body}, old_body is {old_body}" 21 | 22 | # assertion_content should always return None in SP auth. 23 | assert auth.assertion_content is None, auth.assertion_content 24 | 25 | # reauthenticate should always return success. 26 | expected_reauth_response = {"success": True} 27 | reauth_response = auth.reauthenticate() 28 | assert ( 29 | reauth_response == expected_reauth_response 30 | ), f"reauthenticate() is expected to return {expected_reauth_response}, but returns {reauth_response}" 31 | 32 | # It also returns success response even if we pass extra keyword argument(s). 33 | reauth_response = auth.reauthenticate(foo="bar") 34 | assert ( 35 | reauth_response == expected_reauth_response 36 | ), f'reauthenticate(foo="bar") is expected to return {expected_reauth_response}, but returns {reauth_response}' 37 | -------------------------------------------------------------------------------- /test/unit/test_auth_oauth.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | try: # pragma: no cover 5 | from snowflake.connector.auth import AuthByOAuth 6 | except ImportError: 7 | from snowflake.connector.auth_oauth import AuthByOAuth 8 | 9 | 10 | def test_auth_oauth(): 11 | """Simple OAuth test.""" 12 | token = "oAuthToken" 13 | auth = AuthByOAuth(token) 14 | body = {"data": {}} 15 | auth.update_body(body) 16 | assert body["data"]["TOKEN"] == token, body 17 | assert body["data"]["AUTHENTICATOR"] == "OAUTH", body 18 | -------------------------------------------------------------------------------- /test/unit/test_binaryformat.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from snowflake.connector.sfbinaryformat import ( 5 | SnowflakeBinaryFormat, 6 | binary_to_python, 7 | binary_to_snowflake, 8 | ) 9 | 10 | 11 | def test_basic(): 12 | """Test hex and base64 formatting.""" 13 | # Hex 14 | fmt = SnowflakeBinaryFormat("heX") 15 | assert fmt.format(b"") == "" 16 | assert fmt.format(b"\x00") == "00" 17 | assert fmt.format(b"\xAB\xCD\x12") == "ABCD12" 18 | assert fmt.format(b"\x00\xFF\x42\x01") == "00FF4201" 19 | 20 | # Base64 21 | fmt = SnowflakeBinaryFormat("BasE64") 22 | assert fmt.format(b"") == "" 23 | assert fmt.format(b"\x00") == "AA==" 24 | assert fmt.format(b"\xAB\xCD\x12") == "q80S" 25 | assert fmt.format(b"\x00\xFF\x42\x01") == "AP9CAQ==" 26 | 27 | 28 | def test_binary_to_python(): 29 | """Test conversion to Python data type.""" 30 | assert binary_to_python("") == b"" 31 | assert binary_to_python("00") == b"\x00" 32 | assert 
binary_to_python("ABCD12") == b"\xAB\xCD\x12" 33 | 34 | 35 | def test_binary_to_snowflake(): 36 | """Test conversion for passing to Snowflake.""" 37 | assert binary_to_snowflake(b"") == b"" 38 | assert binary_to_snowflake(b"\x00") == b"00" 39 | assert binary_to_snowflake(b"\xAB\xCD\x12") == b"ABCD12" 40 | -------------------------------------------------------------------------------- /test/unit/test_bind_upload_agent.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from unittest import mock 5 | from unittest.mock import MagicMock 6 | 7 | 8 | def test_bind_upload_agent_uploading_multiple_files(): 9 | from snowflake.connector.bind_upload_agent import BindUploadAgent 10 | 11 | csr = MagicMock(auto_spec=True) 12 | rows = [bytes(10)] * 10 13 | agent = BindUploadAgent(csr, rows, stream_buffer_size=10) 14 | agent.upload() 15 | assert csr.execute.call_count == 1 # 1 for stage creation 16 | assert csr._upload_stream.call_count == 10 # 10 for 10 files 17 | 18 | 19 | def test_bind_upload_agent_row_size_exceed_buffer_size(): 20 | from snowflake.connector.bind_upload_agent import BindUploadAgent 21 | 22 | csr = MagicMock(auto_spec=True) 23 | rows = [bytes(15)] * 10 24 | agent = BindUploadAgent(csr, rows, stream_buffer_size=10) 25 | agent.upload() 26 | assert csr.execute.call_count == 1 # 1 for stage creation 27 | assert csr._upload_stream.call_count == 10 # 10 for 10 files 28 | 29 | 30 | def test_bind_upload_agent_scoped_temp_object(): 31 | from snowflake.connector.bind_upload_agent import BindUploadAgent 32 | 33 | csr = MagicMock(auto_spec=True) 34 | rows = [bytes(15)] * 10 35 | agent = BindUploadAgent(csr, rows, stream_buffer_size=10) 36 | with mock.patch.object(agent, "_use_scoped_temp_object", new=True): 37 | with mock.patch.object(agent.cursor, "execute") as mock_execute: 38 | agent._create_stage() 39 | assert ( 40 | "create or replace SCOPED TEMPORARY stage" 41 | in mock_execute.call_args[0][0] 42 | ) 43 | -------------------------------------------------------------------------------- /test/unit/test_compute_chunk_size.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytestmark = pytest.mark.skipolddriver 4 | 5 | 6 | def test_check_chunk_size(): 7 | from snowflake.connector.constants import ( 8 | S3_MAX_OBJECT_SIZE, 9 | S3_MAX_PART_SIZE, 10 | S3_MIN_PART_SIZE, 11 | ) 12 | from snowflake.connector.file_transfer_agent import _chunk_size_calculator 13 | 14 | expected_chunk_size = 8 * 1024**2 15 | sample_file_size_2gb = 2 * 1024**3 16 | sample_file_size_under_5tb = 4.9 * 1024**4 17 | sample_file_size_6tb = 6 * 1024**4 18 | sample_chunk_size_4mb = 4 * 1024**2 19 | 20 | chunk_size_1 = _chunk_size_calculator(sample_file_size_2gb) 21 | assert chunk_size_1 == expected_chunk_size 22 | 23 | chunk_size_2 = _chunk_size_calculator(int(sample_file_size_under_5tb)) 24 | assert chunk_size_2 <= S3_MAX_PART_SIZE 25 | 26 | with pytest.raises(ValueError) as exc: 27 | _chunk_size_calculator(sample_file_size_6tb) 28 | assert ( 29 | f"File size {sample_file_size_6tb} exceeds the maximum file size {S3_MAX_OBJECT_SIZE} allowed in S3." 
30 | in str(exc) 31 | ) 32 | 33 | chunk_size_1 = _chunk_size_calculator(sample_chunk_size_4mb) 34 | assert chunk_size_1 >= S3_MIN_PART_SIZE 35 | -------------------------------------------------------------------------------- /test/unit/test_dbapi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from snowflake.connector.dbapi import Binary 5 | 6 | 7 | def test_Binary(): 8 | assert Binary(b"foo") == b"foo" 9 | -------------------------------------------------------------------------------- /test/unit/test_dependencies.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | import cryptography.utils 4 | import pytest 5 | 6 | 7 | @pytest.mark.xfail(reason="Deprecation warning is expected to fail the test") 8 | def test_cryptography_deprecated(): 9 | deprecate_warning = cryptography.utils.deprecated( 10 | cryptography.utils.CryptographyDeprecationWarning, 11 | "test", 12 | "test", 13 | cryptography.utils.CryptographyDeprecationWarning, 14 | ) 15 | warnings.warn( 16 | deprecate_warning.message, deprecate_warning.warning_class, stacklevel=2 17 | ) 18 | -------------------------------------------------------------------------------- /test/unit/test_error_arrow_stream.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from ..helpers import ( 4 | _arrow_error_stream_chunk_remove_random_length_bytes_test, 5 | _arrow_error_stream_chunk_remove_single_byte_test, 6 | _arrow_error_stream_random_input_test, 7 | ) 8 | 9 | 10 | @pytest.mark.skipolddriver 11 | def test_connector_error_base64_stream_chunk_remove_single_byte(): 12 | _arrow_error_stream_chunk_remove_single_byte_test(use_table_iterator=False) 13 | 14 | 15 | @pytest.mark.skipolddriver 16 | def test_connector_error_base64_stream_chunk_remove_random_length_bytes(): 17 | _arrow_error_stream_chunk_remove_random_length_bytes_test(use_table_iterator=False) 18 | 19 | 20 | @pytest.mark.skipolddriver 21 | def test_connector_error_random_input(): 22 | _arrow_error_stream_random_input_test(use_table_iterator=False) 23 | -------------------------------------------------------------------------------- /test/unit/test_errors.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | import uuid 5 | 6 | from snowflake.connector import errors 7 | 8 | 9 | def test_detecting_duplicate_detail_insertion(): 10 | sfqid = str(uuid.uuid4()) 11 | query = "select something_really_buggy from buggy_table" 12 | sqlstate = "24000" 13 | errno = 123456 14 | msg = "Some error happened" 15 | expected_msg = re.compile(rf"{errno} \({sqlstate}\): {sfqid}: {msg}") 16 | original_ex = errors.ProgrammingError( 17 | sqlstate=sqlstate, 18 | sfqid=sfqid, 19 | query=query, 20 | errno=errno, 21 | msg=msg, 22 | ) 23 | # Test whether regular exception confirms to what we expect to see 24 | assert expected_msg.fullmatch(original_ex.msg) 25 | 26 | # Test whether exception with flag confirms to what we expect to see 27 | assert errors.ProgrammingError( 28 | msg=original_ex.msg, 29 | done_format_msg=True, 30 | ) 31 | # Test whether exception with auto detection confirms to what we expect to see 32 | assert errors.ProgrammingError( 33 | msg=original_ex.msg, 34 | ) 35 | 36 | 37 | def test_args(): 38 | assert errors.Error("msg").args == ("msg",) 39 | 
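The formatting contract exercised above (`errno (sqlstate): sfqid: msg`) also means callers can read those fields directly instead of parsing the message; a minimal sketch, assuming an already-open cursor `cur` (connection setup elided):

```python
from snowflake.connector import ProgrammingError

try:
    cur.execute("select something_really_buggy from buggy_table")
except ProgrammingError as e:
    # The same fields the formatted message above is assembled from.
    print(e.errno, e.sqlstate, e.sfqid, e.msg)
```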
-------------------------------------------------------------------------------- /test/unit/test_parse_account.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import annotations 3 | 4 | from snowflake.connector.util_text import parse_account 5 | 6 | 7 | def test_parse_account_basic(): 8 | assert parse_account("account1") == "account1" 9 | 10 | assert parse_account("account1.eu-central-1") == "account1" 11 | 12 | assert ( 13 | parse_account("account1-jkabfvdjisoa778wqfgeruishafeuw89q.global") == "account1" 14 | ) 15 | -------------------------------------------------------------------------------- /test/unit/test_text_util.py: -------------------------------------------------------------------------------- 1 | import concurrent.futures 2 | import random 3 | 4 | import pytest 5 | 6 | try: 7 | from snowflake.connector.util_text import random_string 8 | except ImportError: 9 | pass 10 | 11 | pytestmark = pytest.mark.skipolddriver  # old driver tests won't run this module 12 | 13 | 14 | def test_random_string_generation_with_same_global_seed(): 15 | random.seed(42) 16 | random_string1 = random_string() 17 | random.seed(42) 18 | random_string2 = random_string() 19 | assert ( 20 | isinstance(random_string1, str) 21 | and isinstance(random_string2, str) 22 | and random_string1 != random_string2 23 | ) 24 | 25 | def get_random_string(): 26 | random.seed(42) 27 | return random_string() 28 | 29 | with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: 30 | # Submit tasks to the pool and get future objects 31 | futures = [executor.submit(get_random_string) for _ in range(5)] 32 | res = [f.result() for f in futures] 33 | assert len(set(res)) == 5 # no duplicate string 34 | -------------------------------------------------------------------------------- /test/unit/test_url_util.py: -------------------------------------------------------------------------------- 1 | try: 2 | from snowflake.connector.url_util import ( 3 | extract_top_level_domain_from_hostname, 4 | is_valid_url, 5 | url_encode_str, 6 | ) 7 | except ImportError: 8 | 9 | def is_valid_url(s): 10 | return False 11 | 12 | def url_encode_str(s): 13 | return "" 14 | 15 | 16 | def test_url_validator(): 17 | assert is_valid_url("https://ssoTestURL.okta.com") 18 | assert is_valid_url("https://ssoTestURL.okta.com:8080") 19 | assert is_valid_url("https://ssoTestURL.okta.com/testpathvalue") 20 | assert is_valid_url( 21 | "https://sso.abc.com/idp/startSSO.ping?PartnerSpId=https://xyz.eu-central-1.snowflakecomputing.com/" 22 | ) 23 | 24 | assert not is_valid_url("-a Calculator") 25 | assert not is_valid_url("This is a random text") 26 | assert not is_valid_url("file://TestForFile") 27 | 28 | 29 | def test_encoder(): 30 | assert url_encode_str("Hello @World") == "Hello+%40World" 31 | assert url_encode_str("Test//String") == "Test%2F%2FString" 32 | assert url_encode_str(None) == "" 33 | 34 | 35 | def test_extract_top_level_domain_from_hostname(): 36 | assert extract_top_level_domain_from_hostname("www.snowflakecomputing.com") == "com" 37 | assert extract_top_level_domain_from_hostname("www.snowflakecomputing.cn") == "cn" 38 | assert ( 39 | extract_top_level_domain_from_hostname("www.snowflakecomputing.com.cn") == "cn" 40 | ) 41 | assert extract_top_level_domain_from_hostname("a.b.c.d") == "d" 42 | assert extract_top_level_domain_from_hostname() == "com" 43 | assert extract_top_level_domain_from_hostname("a") == "com" 44 | assert
extract_top_level_domain_from_hostname("a.b.c.def123") == "com" 45 | -------------------------------------------------------------------------------- /test/unit/test_util.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | try: 4 | from snowflake.connector._utils import _TrackedQueryCancellationTimer 5 | except ImportError: 6 | pass 7 | 8 | pytestmark = pytest.mark.skipolddriver 9 | 10 | 11 | def test_timer(): 12 | timer = _TrackedQueryCancellationTimer(1, lambda: None) 13 | timer.start() 14 | timer.join() 15 | assert timer.executed 16 | 17 | timer = _TrackedQueryCancellationTimer(1, lambda: None) 18 | timer.start() 19 | timer.cancel() 20 | assert not timer.executed 21 | -------------------------------------------------------------------------------- /test/unit/test_wiremock_client.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Generator 2 | 3 | import pytest 4 | 5 | # old driver support 6 | try: 7 | from snowflake.connector.vendored import requests 8 | except ImportError: 9 | import requests 10 | 11 | 12 | from ..wiremock.wiremock_utils import WiremockClient 13 | 14 | 15 | @pytest.mark.skipolddriver 16 | @pytest.fixture(scope="session") 17 | def wiremock_client() -> Generator[WiremockClient, Any, None]: 18 | with WiremockClient() as client: 19 | yield client 20 | 21 | 22 | def test_wiremock(wiremock_client): 23 | connection_reset_by_peer_mapping = { 24 | "mappings": [ 25 | { 26 | "scenarioName": "Basic example", 27 | "requiredScenarioState": "Started", 28 | "request": {"method": "GET", "url": "/endpoint"}, 29 | "response": {"status": 200}, 30 | } 31 | ], 32 | "importOptions": {"duplicatePolicy": "IGNORE", "deleteAllNotInImport": True}, 33 | } 34 | wiremock_client.import_mapping(connection_reset_by_peer_mapping) 35 | 36 | response = requests.get( 37 | f"http://{wiremock_client.wiremock_host}:{wiremock_client.wiremock_http_port}/endpoint" 38 | ) 39 | 40 | assert response is not None, "response is None" 41 | assert ( 42 | response.status_code == requests.codes.ok 43 | ), f"response status is not 200, received status {response.status_code}" 44 | -------------------------------------------------------------------------------- /test/wiremock/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snowflakedb/snowflake-connector-python/c53aad7b4ec46d7fe77de3397dc625b4f1eb9935/test/wiremock/__init__.py -------------------------------------------------------------------------------- /tested_requirements/README.md: -------------------------------------------------------------------------------- 1 | # Tested requirements files 2 | 3 | ## Usage 4 | 5 | These requirements files can be used by customers to reproduce the exact environment 6 | Snowflake used to run tests with. 
7 | 8 | Use them like regular requirements files, for example: 9 | 10 | ```shell 11 | python -m pip install -r requirements_39.reqs 12 | ``` 13 | 14 | To check an existing environment against these pins, see the sketch at the end of this listing. 15 | -------------------------------------------------------------------------------- /tested_requirements/requirements_310.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.10.17 2 | asn1crypto==1.5.1 3 | boto3==1.38.4 4 | botocore==1.38.4 5 | certifi==2025.4.26 6 | cffi==1.17.1 7 | charset-normalizer==3.4.1 8 | cryptography==44.0.2 9 | filelock==3.18.0 10 | idna==3.10 11 | jmespath==1.0.1 12 | packaging==25.0 13 | platformdirs==4.3.7 14 | pycparser==2.22 15 | PyJWT==2.10.1 16 | pyOpenSSL==25.0.0 17 | python-dateutil==2.9.0.post0 18 | pytz==2025.2 19 | requests==2.32.3 20 | s3transfer==0.12.0 21 | six==1.17.0 22 | sortedcontainers==2.4.0 23 | tomlkit==0.13.2 24 | typing_extensions==4.13.2 25 | urllib3==2.4.0 26 | snowflake-connector-python==3.15.0 27 | -------------------------------------------------------------------------------- /tested_requirements/requirements_311.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.11.12 2 | asn1crypto==1.5.1 3 | boto3==1.38.4 4 | botocore==1.38.4 5 | certifi==2025.4.26 6 | cffi==1.17.1 7 | charset-normalizer==3.4.1 8 | cryptography==44.0.2 9 | filelock==3.18.0 10 | idna==3.10 11 | jmespath==1.0.1 12 | packaging==25.0 13 | platformdirs==4.3.7 14 | pycparser==2.22 15 | PyJWT==2.10.1 16 | pyOpenSSL==25.0.0 17 | python-dateutil==2.9.0.post0 18 | pytz==2025.2 19 | requests==2.32.3 20 | s3transfer==0.12.0 21 | six==1.17.0 22 | sortedcontainers==2.4.0 23 | tomlkit==0.13.2 24 | typing_extensions==4.13.2 25 | urllib3==2.4.0 26 | snowflake-connector-python==3.15.0 27 | -------------------------------------------------------------------------------- /tested_requirements/requirements_312.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.12.10 2 | asn1crypto==1.5.1 3 | boto3==1.38.4 4 | botocore==1.38.4 5 | certifi==2025.4.26 6 | cffi==1.17.1 7 | charset-normalizer==3.4.1 8 | cryptography==44.0.2 9 | filelock==3.18.0 10 | idna==3.10 11 | jmespath==1.0.1 12 | packaging==25.0 13 | platformdirs==4.3.7 14 | pycparser==2.22 15 | PyJWT==2.10.1 16 | pyOpenSSL==25.0.0 17 | python-dateutil==2.9.0.post0 18 | pytz==2025.2 19 | requests==2.32.3 20 | s3transfer==0.12.0 21 | setuptools==80.0.0 22 | six==1.17.0 23 | sortedcontainers==2.4.0 24 | tomlkit==0.13.2 25 | typing_extensions==4.13.2 26 | urllib3==2.4.0 27 | wheel==0.45.1 28 | snowflake-connector-python==3.15.0 29 | -------------------------------------------------------------------------------- /tested_requirements/requirements_313.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.13.3 2 | asn1crypto==1.5.1 3 | boto3==1.38.4 4 | botocore==1.38.4 5 | certifi==2025.4.26 6 | cffi==1.17.1 7 | charset-normalizer==3.4.1 8 | cryptography==44.0.2 9 | filelock==3.18.0 10 | idna==3.10 11 | jmespath==1.0.1 12 | packaging==25.0 13 | platformdirs==4.3.7 14 | pycparser==2.22 15 | PyJWT==2.10.1 16 | pyOpenSSL==25.0.0 17 | python-dateutil==2.9.0.post0 18 | pytz==2025.2 19 | requests==2.32.3 20 | s3transfer==0.12.0 21 | setuptools==80.0.0 22 | six==1.17.0 23 | sortedcontainers==2.4.0 24 | tomlkit==0.13.2 25 | typing_extensions==4.13.2 26 | urllib3==2.4.0 27 | wheel==0.45.1 28 | snowflake-connector-python==3.15.0 29 | 
-------------------------------------------------------------------------------- /tested_requirements/requirements_39.reqs: -------------------------------------------------------------------------------- 1 | # Generated on: Python 3.9.22 2 | asn1crypto==1.5.1 3 | boto3==1.38.4 4 | botocore==1.38.4 5 | certifi==2025.4.26 6 | cffi==1.17.1 7 | charset-normalizer==3.4.1 8 | cryptography==44.0.2 9 | filelock==3.18.0 10 | idna==3.10 11 | jmespath==1.0.1 12 | packaging==25.0 13 | platformdirs==4.3.7 14 | pycparser==2.22 15 | PyJWT==2.10.1 16 | pyOpenSSL==25.0.0 17 | python-dateutil==2.9.0.post0 18 | pytz==2025.2 19 | requests==2.32.3 20 | s3transfer==0.12.0 21 | six==1.17.0 22 | sortedcontainers==2.4.0 23 | tomlkit==0.13.2 24 | typing_extensions==4.13.2 25 | urllib3==1.26.20 26 | snowflake-connector-python==3.15.0 27 | --------------------------------------------------------------------------------
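As referenced in tested_requirements/README.md above, here is a minimal sketch of one way to check an existing environment against one of the pinned .reqs files. This helper is not part of the repository; the check_env.py name and the hard-coded path are illustrative assumptions, and it relies only on the Python standard library.

```python
# check_env.py -- hypothetical helper, NOT shipped with snowflake-connector-python.
# Compares installed package versions against a tested_requirements pin file.
from importlib.metadata import PackageNotFoundError, version


def check(reqs_path: str) -> int:
    """Print every deviation from the pin file; return the number found."""
    mismatches = 0
    with open(reqs_path) as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith("#"):
                continue  # skip blanks and the "# Generated on" header
            name, _, pinned = line.partition("==")
            try:
                installed = version(name)
            except PackageNotFoundError:
                print(f"{name}: not installed (tested with {pinned})")
                mismatches += 1
                continue
            if installed != pinned:
                print(f"{name}: found {installed}, tested with {pinned}")
                mismatches += 1
    return mismatches


if __name__ == "__main__":
    # Example invocation against the Python 3.9 pin set shown above.
    raise SystemExit(check("tested_requirements/requirements_39.reqs"))
```

A non-zero exit status signals a drifted environment, which makes the check easy to wire into CI.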