├── .env.example ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ ├── documentation.md │ └── feature_request.md ├── actions │ ├── build-docs │ │ └── action.yml │ ├── bump-version │ │ ├── .gitignore │ │ ├── action.js │ │ ├── action.test.js │ │ ├── action.yml │ │ ├── core.js │ │ ├── index.js │ │ ├── jest.config.json │ │ ├── package-lock.json │ │ └── package.json │ ├── cleanup-all │ │ ├── action.yml │ │ └── cleanup-test-projects.py │ ├── index-create │ │ ├── action.yml │ │ └── create.py │ ├── index-delete │ │ ├── action.yml │ │ └── delete.py │ ├── project-create │ │ ├── action.yml │ │ └── script.py │ ├── project-delete │ │ ├── action.yml │ │ ├── delete-project.py │ │ └── delete-resources.py │ ├── run-integration-test │ │ └── action.yaml │ ├── secret-decrypt │ │ ├── action.yaml │ │ └── script.py │ ├── secret-encrypt │ │ ├── action.yml │ │ └── script.py │ ├── setup-poetry │ │ └── action.yml │ ├── test-dependency-asyncio-rest │ │ └── action.yaml │ ├── test-dependency-grpc │ │ └── action.yaml │ └── test-dependency-rest │ │ └── action.yaml └── workflows │ ├── add-labels.yaml │ ├── build-and-publish-docs.yaml │ ├── cleanup-nightly.yaml │ ├── on-merge.yaml │ ├── on-pr-dep-change.yaml │ ├── on-pr.yaml │ ├── project-cleanup.yaml │ ├── project-setup.yaml │ ├── publish-to-pypi.yaml │ ├── release-dev.yaml │ ├── release-prod.yaml │ ├── testing-dependency-asyncio.yaml │ ├── testing-dependency-grpc.yaml │ ├── testing-dependency-rest.yaml │ ├── testing-dependency.yaml │ ├── testing-install.yaml │ ├── testing-integration.yaml │ ├── testing-lint.yaml │ └── testing-unit.yaml ├── .gitignore ├── .gitmodules ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── MAINTAINERS.md ├── MANIFEST.in ├── Makefile ├── README.md ├── codegen ├── buf.gen.yaml ├── buf.lock ├── buf.yaml ├── build-grpc.sh └── build-oas.sh ├── docs ├── _static │ ├── custom.css │ └── pinecone-logo.svg ├── asyncio.rst ├── client-configuration.md ├── conf.py ├── db_control │ 
├── collections.md │ ├── pod-indexes.md │ ├── serverless-indexes.md │ ├── shared-index-actions.md │ └── shared-index-configs.md ├── db_data │ └── index-usage-byov.md ├── faq.md ├── favicon-32x32.png ├── grpc.rst ├── index.rst ├── inference │ └── inference-api.md ├── maintainers │ ├── debugging.md │ ├── release-workflow.png │ ├── releasing.md │ └── testing-guide.md ├── rest.rst ├── upgrading.md └── working-with-indexes.rst ├── mypy.ini ├── pinecone ├── __init__.py ├── __init__.pyi ├── __version__ ├── config │ ├── __init__.py │ ├── config.py │ ├── openapi_config_factory.py │ ├── openapi_configuration.py │ └── pinecone_config.py ├── control │ └── __init__.py ├── core │ ├── __init__.py │ ├── grpc │ │ └── protos │ │ │ ├── db_data_2025_01_pb2.py │ │ │ ├── db_data_2025_01_pb2.pyi │ │ │ └── db_data_2025_01_pb2_grpc.py │ └── openapi │ │ ├── db_control │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ └── manage_indexes_api.py │ │ ├── apis │ │ │ └── __init__.py │ │ ├── model │ │ │ ├── __init__.py │ │ │ ├── backup_list.py │ │ │ ├── backup_model.py │ │ │ ├── byoc_spec.py │ │ │ ├── collection_list.py │ │ │ ├── collection_model.py │ │ │ ├── configure_index_request.py │ │ │ ├── configure_index_request_embed.py │ │ │ ├── configure_index_request_spec.py │ │ │ ├── configure_index_request_spec_pod.py │ │ │ ├── create_backup_request.py │ │ │ ├── create_collection_request.py │ │ │ ├── create_index_for_model_request.py │ │ │ ├── create_index_for_model_request_embed.py │ │ │ ├── create_index_from_backup_request.py │ │ │ ├── create_index_from_backup_response.py │ │ │ ├── create_index_request.py │ │ │ ├── deletion_protection.py │ │ │ ├── error_response.py │ │ │ ├── error_response_error.py │ │ │ ├── index_list.py │ │ │ ├── index_model.py │ │ │ ├── index_model_spec.py │ │ │ ├── index_model_status.py │ │ │ ├── index_spec.py │ │ │ ├── index_tags.py │ │ │ ├── model_index_embed.py │ │ │ ├── pagination_response.py │ │ │ ├── pod_spec.py │ │ │ ├── pod_spec_metadata_config.py │ │ │ ├── 
restore_job_list.py │ │ │ ├── restore_job_model.py │ │ │ └── serverless_spec.py │ │ └── models │ │ │ └── __init__.py │ │ ├── db_data │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ ├── bulk_operations_api.py │ │ │ ├── namespace_operations_api.py │ │ │ └── vector_operations_api.py │ │ ├── apis │ │ │ └── __init__.py │ │ ├── model │ │ │ ├── __init__.py │ │ │ ├── delete_request.py │ │ │ ├── describe_index_stats_request.py │ │ │ ├── fetch_response.py │ │ │ ├── hit.py │ │ │ ├── import_error_mode.py │ │ │ ├── import_model.py │ │ │ ├── index_description.py │ │ │ ├── list_imports_response.py │ │ │ ├── list_item.py │ │ │ ├── list_namespaces_response.py │ │ │ ├── list_response.py │ │ │ ├── namespace_description.py │ │ │ ├── namespace_summary.py │ │ │ ├── pagination.py │ │ │ ├── protobuf_any.py │ │ │ ├── protobuf_null_value.py │ │ │ ├── query_request.py │ │ │ ├── query_response.py │ │ │ ├── query_vector.py │ │ │ ├── rpc_status.py │ │ │ ├── scored_vector.py │ │ │ ├── search_records_request.py │ │ │ ├── search_records_request_query.py │ │ │ ├── search_records_request_rerank.py │ │ │ ├── search_records_response.py │ │ │ ├── search_records_response_result.py │ │ │ ├── search_records_vector.py │ │ │ ├── search_usage.py │ │ │ ├── search_vector.py │ │ │ ├── single_query_results.py │ │ │ ├── sparse_values.py │ │ │ ├── start_import_request.py │ │ │ ├── start_import_response.py │ │ │ ├── update_request.py │ │ │ ├── upsert_record.py │ │ │ ├── upsert_request.py │ │ │ ├── upsert_response.py │ │ │ ├── usage.py │ │ │ ├── vector.py │ │ │ └── vector_values.py │ │ └── models │ │ │ └── __init__.py │ │ └── inference │ │ ├── __init__.py │ │ ├── api │ │ ├── __init__.py │ │ └── inference_api.py │ │ ├── apis │ │ └── __init__.py │ │ ├── model │ │ ├── __init__.py │ │ ├── dense_embedding.py │ │ ├── document.py │ │ ├── embed_request.py │ │ ├── embed_request_inputs.py │ │ ├── embedding.py │ │ ├── embeddings_list.py │ │ ├── embeddings_list_usage.py │ │ ├── error_response.py │ │ ├── 
error_response_error.py │ │ ├── model_info.py │ │ ├── model_info_list.py │ │ ├── model_info_metric.py │ │ ├── model_info_supported_metrics.py │ │ ├── model_info_supported_parameter.py │ │ ├── ranked_document.py │ │ ├── rerank_request.py │ │ ├── rerank_result.py │ │ ├── rerank_result_usage.py │ │ └── sparse_embedding.py │ │ └── models │ │ └── __init__.py ├── data │ ├── __init__.py │ └── features │ │ ├── __init__.py │ │ ├── bulk_imports │ │ └── __init__.py │ │ └── inference │ │ └── __init__.py ├── db_control │ ├── __init__.py │ ├── db_control.py │ ├── db_control_asyncio.py │ ├── enums │ │ ├── __init__.py │ │ ├── clouds.py │ │ ├── deletion_protection.py │ │ ├── metric.py │ │ ├── pod_index_environment.py │ │ ├── pod_type.py │ │ └── vector_type.py │ ├── index_host_store.py │ ├── models │ │ ├── __init__.py │ │ ├── backup_list.py │ │ ├── backup_model.py │ │ ├── byoc_spec.py │ │ ├── collection_description.py │ │ ├── collection_list.py │ │ ├── index_description.py │ │ ├── index_list.py │ │ ├── index_model.py │ │ ├── list_response.py │ │ ├── pod_spec.py │ │ ├── restore_job_list.py │ │ ├── restore_job_model.py │ │ └── serverless_spec.py │ ├── repr_overrides.py │ ├── request_factory.py │ ├── resources │ │ ├── __init__.py │ │ ├── asyncio │ │ │ ├── __init__.py │ │ │ ├── backup.py │ │ │ ├── collection.py │ │ │ ├── index.py │ │ │ └── restore_job.py │ │ └── sync │ │ │ ├── __init__.py │ │ │ ├── backup.py │ │ │ ├── collection.py │ │ │ ├── index.py │ │ │ └── restore_job.py │ └── types │ │ ├── __init__.py │ │ └── create_index_for_model_embed.py ├── db_data │ ├── __init__.py │ ├── dataclasses │ │ ├── __init__.py │ │ ├── fetch_response.py │ │ ├── search_query.py │ │ ├── search_query_vector.py │ │ ├── search_rerank.py │ │ ├── sparse_values.py │ │ ├── utils.py │ │ └── vector.py │ ├── errors.py │ ├── index.py │ ├── index_asyncio.py │ ├── index_asyncio_interface.py │ ├── interfaces.py │ ├── models │ │ └── __init__.py │ ├── query_results_aggregator.py │ ├── request_factory.py │ ├── resources 
│ │ ├── asyncio │ │ │ └── bulk_import_asyncio.py │ │ └── sync │ │ │ ├── bulk_import.py │ │ │ └── bulk_import_request_factory.py │ ├── sparse_values_factory.py │ ├── types │ │ ├── __init__.py │ │ ├── query_filter.py │ │ ├── search_query_typed_dict.py │ │ ├── search_query_vector_typed_dict.py │ │ ├── search_rerank_typed_dict.py │ │ ├── sparse_vector_typed_dict.py │ │ ├── vector_metadata_dict.py │ │ ├── vector_tuple.py │ │ └── vector_typed_dict.py │ └── vector_factory.py ├── deprecated_plugins.py ├── deprecation_warnings.py ├── exceptions │ ├── __init__.py │ └── exceptions.py ├── grpc │ ├── __init__.py │ ├── base.py │ ├── channel_factory.py │ ├── config.py │ ├── future.py │ ├── grpc_runner.py │ ├── index_grpc.py │ ├── pinecone.py │ ├── retry.py │ ├── sparse_values_factory.py │ ├── utils.py │ └── vector_factory_grpc.py ├── inference │ ├── __init__.py │ ├── inference.py │ ├── inference_asyncio.py │ ├── inference_request_builder.py │ ├── models │ │ ├── __init__.py │ │ ├── embedding_list.py │ │ ├── index_embed.py │ │ ├── model_info.py │ │ ├── model_info_list.py │ │ └── rerank_result.py │ ├── repl_overrides.py │ └── resources │ │ ├── asyncio │ │ └── model.py │ │ └── sync │ │ └── model.py ├── langchain_import_warnings.py ├── legacy_pinecone_interface.py ├── models │ └── __init__.py ├── openapi_support │ ├── __init__.py │ ├── api_client.py │ ├── api_client_utils.py │ ├── api_version.py │ ├── asyncio_api_client.py │ ├── asyncio_endpoint.py │ ├── auth_util.py │ ├── cached_class_property.py │ ├── configuration.py │ ├── configuration_lazy.py │ ├── constants.py │ ├── deserializer.py │ ├── endpoint.py │ ├── endpoint_utils.py │ ├── exceptions.py │ ├── model_utils.py │ ├── rest_aiohttp.py │ ├── rest_urllib3.py │ ├── rest_utils.py │ ├── retry_aiohttp.py │ ├── retry_urllib3.py │ ├── serializer.py │ └── types.py ├── pinecone.py ├── pinecone_asyncio.py ├── pinecone_interface_asyncio.py ├── py.typed └── utils │ ├── __init__.py │ ├── check_kwargs.py │ ├── constants.py │ ├── 
convert_enum_to_string.py │ ├── convert_to_list.py │ ├── deprecation_notice.py │ ├── docslinks.py │ ├── error_handling.py │ ├── filter_dict.py │ ├── find_legacy_imports.py │ ├── fix_tuple_length.py │ ├── lazy_imports.py │ ├── legacy_imports.py │ ├── normalize_host.py │ ├── parse_args.py │ ├── plugin_aware.py │ ├── repr_overrides.py │ ├── require_kwargs.py │ ├── setup_openapi_client.py │ ├── tqdm.py │ ├── user_agent.py │ └── version.py ├── poetry.lock ├── pyproject.toml ├── scripts ├── generate_usage.py ├── repl.py ├── test-async-retry.py └── test-server.py └── tests ├── __init__.py ├── dependency ├── __init__.py ├── asyncio-rest │ └── test_sanity.py ├── conftest.py ├── grpc │ └── test_sanity.py └── rest │ └── test_sanity.py ├── integration ├── __init__.py ├── conftest.py ├── control │ ├── __init__.py │ ├── pod │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_collections.py │ │ ├── test_collections_errors.py │ │ ├── test_configure_pod_index.py │ │ ├── test_create_index.py │ │ └── test_deletion_protection.py │ ├── resources │ │ ├── __init__.py │ │ ├── backup │ │ │ ├── __init__.py │ │ │ └── test_backup.py │ │ ├── collections │ │ │ ├── __init__.py │ │ │ ├── helpers.py │ │ │ └── test_dense_index.py │ │ ├── conftest.py │ │ ├── index │ │ │ ├── __init__.py │ │ │ ├── test_configure.py │ │ │ ├── test_create.py │ │ │ ├── test_delete.py │ │ │ ├── test_describe.py │ │ │ ├── test_has.py │ │ │ └── test_list.py │ │ └── restore_job │ │ │ ├── __init__.py │ │ │ ├── test_describe.py │ │ │ └── test_list.py │ └── serverless │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_configure_index_deletion_protection.py │ │ ├── test_configure_index_tags.py │ │ ├── test_create_index.py │ │ ├── test_create_index_api_errors.py │ │ ├── test_create_index_for_model.py │ │ ├── test_create_index_for_model_errors.py │ │ ├── test_create_index_timeouts.py │ │ ├── test_create_index_type_errors.py │ │ ├── test_describe_index.py │ │ ├── test_has_index.py │ │ ├── test_list_indexes.py │ │ └── 
test_sparse_index.py ├── control_asyncio │ ├── __init__.py │ ├── conftest.py │ ├── resources │ │ ├── __init__.py │ │ ├── backup │ │ │ ├── __init__.py │ │ │ └── test_backup.py │ │ ├── conftest.py │ │ ├── index │ │ │ ├── __init__.py │ │ │ ├── conftest.py │ │ │ └── test_create.py │ │ └── restore_job │ │ │ ├── __init__.py │ │ │ ├── test_describe.py │ │ │ └── test_list.py │ ├── test_configure_index_deletion_protection.py │ ├── test_configure_index_tags.py │ ├── test_create_index.py │ ├── test_create_index_api_errors.py │ ├── test_create_index_for_model.py │ ├── test_create_index_for_model_errors.py │ ├── test_create_index_timeouts.py │ ├── test_create_index_type_errors.py │ ├── test_describe_index.py │ ├── test_has_index.py │ ├── test_list_indexes.py │ └── test_sparse_index.py ├── data │ ├── __init__.py │ ├── conftest.py │ ├── seed.py │ ├── test_delete_future.py │ ├── test_fetch.py │ ├── test_fetch_future.py │ ├── test_initialization.py │ ├── test_list.py │ ├── test_list_errors.py │ ├── test_list_sparse.py │ ├── test_query.py │ ├── test_query_errors.py │ ├── test_query_namespaces.py │ ├── test_query_namespaces_sparse.py │ ├── test_search_and_upsert_records.py │ ├── test_upsert_dense.py │ ├── test_upsert_errors.py │ ├── test_upsert_from_dataframe.py │ ├── test_upsert_future.py │ ├── test_upsert_hybrid.py │ ├── test_upsert_sparse.py │ └── test_weird_ids.py ├── data_asyncio │ ├── __init__.py │ ├── conftest.py │ ├── test_client_instantiation.py │ ├── test_list.py │ ├── test_query.py │ ├── test_query_namespaces.py │ ├── test_query_namespaces_sparse.py │ ├── test_query_sparse.py │ ├── test_search_and_upsert_records.py │ ├── test_unauthorized_access.py │ ├── test_update.py │ ├── test_update_sparse.py │ ├── test_upsert.py │ └── test_upsert_sparse.py ├── helpers │ ├── __init__.py │ └── helpers.py ├── inference │ ├── __init__.py │ ├── asyncio │ │ ├── __init__.py │ │ ├── test_embeddings.py │ │ ├── test_models.py │ │ └── test_rerank.py │ └── sync │ │ ├── __init__.py │ │ ├── 
test_embeddings.py │ │ ├── test_models.py │ │ └── test_rerank.py ├── plugins │ └── test_plugins.py ├── proxy_config │ ├── .mitm │ │ ├── proxy1 │ │ │ ├── mitmproxy-ca-cert.cer │ │ │ ├── mitmproxy-ca-cert.p12 │ │ │ ├── mitmproxy-ca-cert.pem │ │ │ ├── mitmproxy-ca.p12 │ │ │ ├── mitmproxy-ca.pem │ │ │ └── mitmproxy-dhparam.pem │ │ └── proxy2 │ │ │ ├── mitmproxy-ca-cert.cer │ │ │ ├── mitmproxy-ca-cert.p12 │ │ │ ├── mitmproxy-ca-cert.pem │ │ │ ├── mitmproxy-ca.p12 │ │ │ ├── mitmproxy-ca.pem │ │ │ └── mitmproxy-dhparam.pem │ ├── __init__.py │ ├── conftest.py │ ├── logs │ │ └── .gitkeep │ └── test_proxy_settings.py └── test_upsert.py ├── perf ├── test_query_namespaces.py └── test_query_results_aggregator.py ├── unit ├── __init__.py ├── data │ ├── test_bulk_import.py │ ├── test_datetime_parsing.py │ ├── test_import_datetime_parsing.py │ ├── test_instantiation.py │ ├── test_request_factory.py │ └── test_vector_factory.py ├── db_control │ ├── test_index.py │ └── test_index_request_factory.py ├── models │ ├── test_collection_list.py │ ├── test_index_embed.py │ ├── test_index_list.py │ └── test_index_model.py ├── openapi_support │ ├── __init__.py │ ├── test_api_client.py │ ├── test_model_simple.py │ └── test_retries.py ├── test_config.py ├── test_config_builder.py ├── test_control.py ├── test_index.py ├── test_index_initialization.py ├── test_langchain_helpful_errors.py ├── test_plugin_aware.py ├── test_query_results_aggregator.py ├── test_version.py └── utils │ ├── test_convert_to_list.py │ ├── test_docs_links.py │ ├── test_normalize_host.py │ ├── test_setup_openapi_client.py │ └── test_user_agent.py ├── unit_grpc ├── conftest.py ├── test_channel_factory.py ├── test_futures.py ├── test_grpc_index_describe_index_stats.py ├── test_grpc_index_fetch.py ├── test_grpc_index_initialization.py ├── test_grpc_index_query.py ├── test_grpc_index_update.py ├── test_grpc_index_upsert.py ├── test_runner.py ├── test_sparse_values_factory.py └── test_vector_factory_grpc.py └── upgrade ├── 
test_all.py ├── test_reorganization.py └── test_v6_upgrade.py /.env.example: -------------------------------------------------------------------------------- 1 | ## Fill in real values and rename this file to .env before 2 | ## running integration tests on your machine. 3 | 4 | ## This should be your personal API key. These will get picked up 5 | ## and used any time you run integration tests under 6 | ## "poetry run pytest tests/integration" 7 | ## 8 | ## This key is also read and used to setup the pc client instance 9 | ## when running "poetry run repl". This makes it easy to do 10 | ## one-off manual testing. 11 | PINECONE_API_KEY='' 12 | 13 | ## If you set this variable, you can also use the pcci client instance 14 | ## when running "poetry run repl" in order to do cleanup/management 15 | ## on the project used from CI. 16 | PINECONE_API_KEY_CI_TESTING='' 17 | 18 | ## These headers get picked up and attached to every request by the code in 19 | ## pinecone/config/pinecone_config.py 20 | ## 21 | ## The x-environment header is used to route requests to preprod. The value needs to be 22 | ## a JSON string so it can be properly stored and read from an env var. 23 | PINECONE_ADDITIONAL_HEADERS='{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' 24 | 25 | ## There's a bunch of tests in tests/integration/data/test_weird_ids.py 26 | ## that we don't need to run most of the time. Only when refactoring the rat's nest 27 | ## of generated code to ensure we haven't broken something subtle with string handling. 28 | SKIP_WEIRD=true 29 | 30 | ## Some tests can run with either the Pinecone or PineconeGrpc client depending on 31 | ## whether this value is set. 32 | USE_GRPC=false 33 | 34 | ## When debugging, you may want to enable PINECONE_DEBUG_CURL this to see some requests translated into 35 | ## curl syntax. These are useful when reporting API issues to the backend team so they 36 | ## can be reproduced without having to setup a python repro. 
WARNING: This output will 37 | ## include the Api-Key header. 38 | # PINECONE_DEBUG_CURL='true' 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[Bug] " 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is this a new bug?** 11 | In other words: Is this an error, flaw, failure or fault? Please search Github issues and check our [Community Forum](https://community.pinecone.io/) to see if someone has already reported the bug you encountered. 12 | 13 | If this is a request for help or troubleshooting code in your own Pinecone project, please join the [Pinecone Community Forum](https://community.pinecone.io/). 14 | 15 | - [ ] I believe this is a new bug 16 | - [ ] I have searched the existing Github issues and Community Forum, and I could not find an existing post for this bug 17 | 18 | **Describe the bug** 19 | Describe the functionality that was working before but is broken now. 20 | 21 | **Error information** 22 | If you have one, please include the full stack trace here. If not, please share as much as you can about the error. 23 | 24 | **Steps to reproduce the issue locally** 25 | Include steps to reproduce the issue here. If you have sample code or a script that can be used to replicate this issue, please include that as well (including any dependent files to run the code). 26 | 27 | **Environment** 28 | * OS Version: 29 | * Python version: 30 | * Python SDK version: 31 | 32 | **Additional context** 33 | Add any other context about the problem here. 
34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Pinecone Community Forum 4 | url: https://community.pinecone.io/ 5 | about: For support, please see the community forum. 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Documentation 3 | about: Report an issue in our docs 4 | title: "[Docs] " 5 | labels: 'documentation' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Description** 11 | Describe the issue that you've encountered with our documentation. 12 | 13 | **Suggested solution** 14 | Describe how this issue could be fixed or improved. 15 | 16 | **Link to page** 17 | Add a link to the exact documentation page where the issue occurred. 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[Feature Request]" 5 | labels: 'enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **What motivated you to submit this feature request?** 11 | A clear and concise description of why you are requesting this feature - e.g. "Being able to do x would allow me to..." 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
const core = require('./core');

/**
 * Bump a semver-style version string and publish the previous/new values
 * as GitHub Actions outputs (previous_version, previous_version_tag,
 * version, version_tag).
 *
 * @param {string} currentVersion - current version, e.g. "1.2.3"
 * @param {string} bumpType - one of "major", "minor", "patch"
 * @param {string} prerelease - optional prerelease suffix; when truthy it is
 *   appended as `${version}.${prerelease}` (e.g. "dev1" -> "1.2.4.dev1")
 * @returns {string} the new version string
 */
function bumpVersion(currentVersion, bumpType, prerelease) {
  let newVersion = calculateNewVersion(currentVersion, bumpType);

  if (prerelease) {
    newVersion = `${newVersion}.${prerelease}`;
  }

  core.setOutput('previous_version', currentVersion);
  core.setOutput('previous_version_tag', `v${currentVersion}`);
  core.setOutput('version', newVersion);
  core.setOutput('version_tag', `v${newVersion}`);

  return newVersion;
}

/**
 * Compute the next version number for a given bump type.
 *
 * @param {string} currentVersion - version in "major.minor.patch" form;
 *   surrounding whitespace (e.g. a trailing newline from the version file)
 *   is tolerated.
 * @param {string} bumpType - one of "major", "minor", "patch"
 * @returns {string} the bumped version
 * @throws {Error} if bumpType is unknown or any version part is not numeric
 */
function calculateNewVersion(currentVersion, bumpType) {
  const [major, minor, patch] = currentVersion.trim().split('.');

  // Parse with an explicit radix and fail loudly on malformed input instead
  // of silently emitting "NaN.0.0"-style versions downstream.
  const majorNum = parseInt(major, 10);
  const minorNum = parseInt(minor, 10);
  const patchNum = parseInt(patch, 10);
  if ([majorNum, minorNum, patchNum].some(Number.isNaN)) {
    throw new Error(`Invalid version: ${currentVersion}`);
  }

  switch (bumpType) {
    case 'major':
      return `${majorNum + 1}.0.0`;
    case 'minor':
      return `${majorNum}.${minorNum + 1}.0`;
    case 'patch':
      return `${majorNum}.${minorNum}.${patchNum + 1}`;
    default:
      throw new Error(`Invalid bumpType: ${bumpType}`);
  }
}

module.exports = { bumpVersion };
// Entry point for the bump-version GitHub Action: read the current version
// from the configured file, compute the bumped version, and write it back
// to the same file.
const fs = require('fs');
const core = require('./core');
const action = require('./action');

const versionFilePath = core.getInput('versionFile');
const currentVersion = fs.readFileSync(versionFilePath, 'utf8');

const bumpedVersion = action.bumpVersion(
  currentVersion,
  core.getInput('bumpType'),
  core.getInput('prereleaseSuffix')
);

fs.writeFileSync(versionFilePath, bumpedVersion);
account client ID' 14 | required: true 15 | PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET: 16 | description: 'The Pinecone service account client secret' 17 | required: true 18 | 19 | runs: 20 | using: 'composite' 21 | steps: 22 | - name: Setup Poetry 23 | uses: ./.github/actions/setup-poetry 24 | - name: Cleanup all 25 | shell: bash 26 | run: poetry run python3 ./.github/actions/cleanup-all/cleanup-test-projects.py 27 | env: 28 | PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} 29 | PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} 30 | PINECONE_SERVICE_ACCOUNT_CLIENT_ID: ${{ inputs.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }} 31 | PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET: ${{ inputs.PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET }} 32 | -------------------------------------------------------------------------------- /.github/actions/index-create/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Create Index' 2 | description: 'Creates an index to be used in other tests' 3 | 4 | inputs: 5 | region: 6 | description: 'The region of the index' 7 | required: false 8 | default: 'us-west-2' 9 | cloud: 10 | description: 'The cloud of the index' 11 | required: false 12 | default: 'aws' 13 | name_prefix: 14 | description: 'The prefix of the index name' 15 | required: false 16 | default: 'index-name' 17 | dimension: 18 | description: 'The dimension of the index' 19 | required: false 20 | default: '3' 21 | metric: 22 | description: 'The metric of the index' 23 | required: false 24 | default: 'cosine' 25 | PINECONE_API_KEY: 26 | description: 'The Pinecone API key' 27 | required: true 28 | PINECONE_ADDITIONAL_HEADERS: 29 | description: 'Additional headers to send with the request' 30 | required: false 31 | default: '{"sdk-test-suite": "pinecone-python-client"}' 32 | 33 | 34 | outputs: 35 | index_name: 36 | description: 'The name of the index, including randomized suffix' 37 | value: ${{ steps.create-index.outputs.index_name }} 
import os


def read_env_var(name):
    """Return the value of environment variable ``name``.

    Raises:
        Exception: if the variable is not set.
    """
    value = os.environ.get(name)
    if value is None:
        # The previous code did `raise "..."` — raising a plain string is a
        # TypeError in Python 3 (only BaseException subclasses can be
        # raised), which hid the intended message. Raise a real exception,
        # matching the style of the sibling secret-decrypt script.
        raise Exception("Environment variable {} is not set".format(name))
    return value


def main():
    """Delete the Pinecone index named by the INDEX_NAME env var."""
    # Imported lazily so read_env_var stays importable without the
    # pinecone package installed.
    from pinecone import Pinecone

    pc = Pinecone(api_key=read_env_var("PINECONE_API_KEY"))
    to_delete = read_env_var("INDEX_NAME")
    pc.delete_index(name=to_delete)
    print("Index deleted: " + to_delete)


if __name__ == "__main__":
    main()
/.github/actions/secret-decrypt/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Decrypt secret' 2 | description: 'Decrypts a secret using the Fernet encryption algorithm' 3 | 4 | inputs: 5 | encrypted_secret: 6 | description: 'The encrypted secret to decrypt' 7 | required: true 8 | encryption_key: 9 | description: 'The encryption key for use with the Fernet encryption algorithm' 10 | required: true 11 | 12 | outputs: 13 | decrypted_secret: 14 | description: 'The decrypted secret' 15 | value: ${{ steps.decrypt-secret.outputs.decrypted_secret }} 16 | 17 | runs: 18 | using: 'composite' 19 | steps: 20 | - name: Install deps 21 | shell: bash 22 | run: | 23 | pip install cryptography 24 | 25 | - name: Decrypt secret 26 | id: decrypt-secret 27 | shell: bash 28 | run: python3 ./.github/actions/secret-decrypt/script.py 29 | env: 30 | ENCRYPTED_SECRET: ${{ inputs.encrypted_secret }} 31 | FERNET_ENCRYPTION_KEY: ${{ inputs.encryption_key }} 32 | -------------------------------------------------------------------------------- /.github/actions/secret-decrypt/script.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from cryptography.fernet import Fernet 4 | 5 | logger = logging.getLogger(__name__) 6 | 7 | 8 | def main(): 9 | encrypted_secret = os.getenv("ENCRYPTED_SECRET") 10 | encryption_key = os.getenv("FERNET_ENCRYPTION_KEY") 11 | 12 | if encrypted_secret is None: 13 | raise Exception("ENCRYPTED_SECRET is not set") 14 | if encryption_key is None: 15 | raise Exception("FERNET_ENCRYPTION_KEY is not set") 16 | 17 | cipher_suite = Fernet(encryption_key.encode()) 18 | decrypted_secret = cipher_suite.decrypt(encrypted_secret.encode()).decode() 19 | 20 | output_file = os.environ.get("GITHUB_OUTPUT", None) 21 | if output_file is None: 22 | logger.error("GITHUB_OUTPUT is not set, cannot write to output file") 23 | else: 24 | with open(output_file, "a") as f: 
25 | f.write(f"decrypted_secret={decrypted_secret}\n") 26 | 27 | 28 | if __name__ == "__main__": 29 | main() 30 | -------------------------------------------------------------------------------- /.github/actions/secret-encrypt/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Encrypt Secret' 2 | description: 'Encrypts a secret using the Fernet encryption algorithm' 3 | 4 | inputs: 5 | secret: 6 | description: 'The secret to encrypt' 7 | required: true 8 | encryption_key: 9 | description: 'The encryption key for use with the Fernet encryption algorithm' 10 | required: true 11 | 12 | outputs: 13 | encrypted_secret: 14 | description: 'The encrypted secret' 15 | value: ${{ steps.encrypt-secret.outputs.encrypted_secret }} 16 | 17 | runs: 18 | using: 'composite' 19 | steps: 20 | - name: Install deps 21 | shell: bash 22 | run: | 23 | pip install cryptography 24 | 25 | - name: Encrypt secret 26 | id: encrypt-secret 27 | shell: bash 28 | run: python3 ./.github/actions/secret-encrypt/script.py 29 | env: 30 | SECRET: ${{ inputs.secret }} 31 | ENCRYPTION_KEY: ${{ inputs.encryption_key }} 32 | -------------------------------------------------------------------------------- /.github/actions/secret-encrypt/script.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from cryptography.fernet import Fernet 4 | 5 | logger = logging.getLogger(__name__) 6 | 7 | 8 | def mask(value): 9 | """Mask the value in Github Actions logs""" 10 | print(f"::add-mask::{value}") 11 | 12 | 13 | def main(): 14 | secret = os.getenv("SECRET") 15 | encryption_key = os.getenv("ENCRYPTION_KEY") 16 | 17 | if secret is None: 18 | raise Exception("SECRET is not set") 19 | if encryption_key is None: 20 | raise Exception("ENCRYPTION_KEY is not set") 21 | 22 | mask(secret) 23 | mask(encryption_key) 24 | 25 | cipher_suite = Fernet(encryption_key.encode()) 26 | encrypted_secret = 
cipher_suite.encrypt(secret.encode()).decode() 27 | 28 | output_file = os.environ.get("GITHUB_OUTPUT", None) 29 | if output_file is None: 30 | logger.error("GITHUB_OUTPUT is not set, cannot write to output file") 31 | else: 32 | with open(output_file, "a") as f: 33 | f.write(f"encrypted_secret={encrypted_secret}\n") 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /.github/actions/setup-poetry/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Setup Poetry' 2 | description: 'Installs Poetry and dependencies' 3 | inputs: 4 | include_grpc: 5 | description: 'Install gRPC dependencies' 6 | required: true 7 | default: 'false' 8 | include_dev: 9 | description: 'Install dev dependencies' 10 | required: true 11 | default: 'true' 12 | include_types: 13 | description: 'Install typing dependencies (mypy, type stubs, etc)' 14 | required: true 15 | default: 'true' 16 | include_asyncio: 17 | description: 'Install asyncio dependencies' 18 | required: true 19 | default: 'false' 20 | python_version: 21 | description: 'Python version to use' 22 | required: true 23 | default: '3.9' 24 | 25 | runs: 26 | using: 'composite' 27 | steps: 28 | - name: Set up Python 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: ${{ inputs.python_version }} 32 | 33 | - name: Install Poetry 34 | uses: snok/install-poetry@v1 35 | 36 | - name: Install dependencies 37 | shell: bash 38 | env: 39 | INCLUDE_GRPC: ${{ inputs.include_grpc }} 40 | INCLUDE_DEV: ${{ inputs.include_dev }} 41 | INCLUDE_TYPES: ${{ inputs.include_types }} 42 | INCLUDE_ASYNCIO: ${{ inputs.include_asyncio }} 43 | run: | 44 | GRPC_FLAG=$( [ "$INCLUDE_GRPC" = "true" ] && echo "--extras grpc" || echo "" ) 45 | ASYNCIO_FLAG=$( [ "$INCLUDE_ASYNCIO" = "true" ] && echo "--extras asyncio" || echo "" ) 46 | DEV_FLAG=$( [ "$INCLUDE_DEV" = "false" ] && echo "--without dev" || echo "" ) 47 | 
TYPING_FLAG=$( [ "$INCLUDE_TYPES" = "true" ] && echo "--with types" || echo "" ) 48 | echo "Installing dependencies with flags: $DEV_FLAG $TYPING_FLAG $GRPC_FLAG $ASYNCIO_FLAG" 49 | poetry install $DEV_FLAG $TYPING_FLAG $GRPC_FLAG $ASYNCIO_FLAG 50 | -------------------------------------------------------------------------------- /.github/actions/test-dependency-asyncio-rest/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Test aiohttp dependencies' 2 | description: 'Runs asyncio sanity test with specific aiohttp dependencies' 3 | 4 | inputs: 5 | PINECONE_API_KEY: 6 | description: 'The Pinecone API key' 7 | required: true 8 | PINECONE_ADDITIONAL_HEADERS: 9 | description: 'Additional headers to send with the request' 10 | required: false 11 | default: '{"sdk-test-suite": "pinecone-python-client"}' 12 | index_name: 13 | description: 'The name of the index' 14 | required: true 15 | python_version: 16 | description: 'The version of Python to use' 17 | required: false 18 | default: '3.9' 19 | aiohttp_version: 20 | description: 'The version of aiohttp to install' 21 | required: true 22 | 23 | runs: 24 | using: 'composite' 25 | steps: 26 | - name: Setup Poetry 27 | uses: ./.github/actions/setup-poetry 28 | with: 29 | include_grpc: false 30 | include_types: false 31 | include_asyncio: true 32 | python_version: ${{ inputs.python_version }} 33 | 34 | - name: 'Install aiohttp ${{ inputs.aiohttp_version }}' 35 | run: 'poetry add aiohttp==${{ inputs.aiohttp_version }}' 36 | shell: bash 37 | 38 | - uses: nick-fields/retry@v3 39 | with: 40 | timeout_minutes: 5 41 | max_attempts: 3 42 | retry_on: error 43 | command: poetry run pytest tests/dependency/asyncio-rest -s -v 44 | env: 45 | PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' 46 | PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' 47 | INDEX_NAME: '${{ inputs.index_name }}' 48 | 
-------------------------------------------------------------------------------- /.github/actions/test-dependency-rest/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Test REST Dependencies' 2 | description: 'Runs sanity test with specific REST dependencies' 3 | 4 | inputs: 5 | PINECONE_API_KEY: 6 | description: 'The Pinecone API key' 7 | required: true 8 | PINECONE_ADDITIONAL_HEADERS: 9 | description: 'Additional headers to send with the request' 10 | required: false 11 | default: '{"sdk-test-suite": "pinecone-python-client"}' 12 | index_name: 13 | description: 'The name of the index' 14 | required: true 15 | python_version: 16 | description: 'The version of Python to use' 17 | required: false 18 | default: '3.9' 19 | urllib3_version: 20 | description: 'The version of urllib3 to install' 21 | required: true 22 | 23 | runs: 24 | using: 'composite' 25 | steps: 26 | - name: Setup Poetry 27 | uses: ./.github/actions/setup-poetry 28 | with: 29 | include_grpc: false 30 | include_types: false 31 | python_version: ${{ inputs.python_version }} 32 | 33 | - name: 'Install urllib3 ${{ inputs.urllib3_version }}' 34 | run: 'poetry add urllib3==${{ inputs.urllib3_version }}' 35 | shell: bash 36 | 37 | - uses: nick-fields/retry@v3 38 | with: 39 | timeout_minutes: 5 40 | max_attempts: 3 41 | retry_on: error 42 | command: poetry run pytest tests/dependency/rest -s -v 43 | env: 44 | PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' 45 | PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' 46 | INDEX_NAME: '${{ inputs.index_name }}' 47 | -------------------------------------------------------------------------------- /.github/workflows/add-labels.yaml: -------------------------------------------------------------------------------- 1 | name: Label issues 2 | on: 3 | issues: 4 | types: 5 | - reopened 6 | - opened 7 | jobs: 8 | label_issues: 9 | runs-on: ubuntu-latest 10 | permissions: 11 | issues: write 12 | 
steps: 13 | - run: gh issue edit "$NUMBER" --add-label "$LABELS" 14 | env: 15 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | GH_REPO: ${{ github.repository }} 17 | NUMBER: ${{ github.event.issue.number }} 18 | LABELS: status:needs-triage 19 | -------------------------------------------------------------------------------- /.github/workflows/build-and-publish-docs.yaml: -------------------------------------------------------------------------------- 1 | name: "Build and publish documentation to sdk-docs" 2 | 3 | on: 4 | workflow_dispatch: {} 5 | workflow_call: 6 | secrets: 7 | SSH_DEPLOY_KEY: 8 | required: true 9 | 10 | jobs: 11 | build-and-deploy-documentation: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | 17 | - name: Generate sphinx documentation 18 | uses: ./.github/actions/build-docs 19 | with: 20 | python-version: 3.11 21 | 22 | - name: Push documentation artifacts to sdk-docs 23 | uses: cpina/github-action-push-to-another-repository@main 24 | env: 25 | SSH_DEPLOY_KEY: ${{ secrets.SSH_DEPLOY_KEY }} 26 | with: 27 | source-directory: docsbuild 28 | destination-github-username: pinecone-io 29 | destination-repository-name: sdk-docs 30 | user-email: clients@pinecone.io 31 | target-branch: main 32 | target-directory: python 33 | commit-message: "Python: automated documentation build - pinecone-python-client merge SHA: ${{ github.sha }}" 34 | -------------------------------------------------------------------------------- /.github/workflows/cleanup-nightly.yaml: -------------------------------------------------------------------------------- 1 | name: 'Cleanup All' 2 | 3 | on: 4 | workflow_dispatch: {} 5 | schedule: 6 | - cron: '5 22 * * *' # 5 minutes after 10pm UTC, every day 7 | 8 | jobs: 9 | cleanup-all: 10 | name: Cleanup all indexes/collections 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Cleanup all 15 | uses: ./.github/actions/cleanup-all 16 | with: 17 | 
PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} 18 | PINECONE_SERVICE_ACCOUNT_CLIENT_ID: ${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }} 19 | PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET: ${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET }} 20 | -------------------------------------------------------------------------------- /.github/workflows/on-pr-dep-change.yaml: -------------------------------------------------------------------------------- 1 | name: Testing (PR - Dependency Change) 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - 'pyproject.toml' 7 | - 'poetry.lock' 8 | workflow_dispatch: {} 9 | 10 | permissions: {} 11 | 12 | concurrency: 13 | group: 'ci-${{ github.workflow }}-${{ github.ref }}' 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | create-project: 18 | uses: './.github/workflows/project-setup.yaml' 19 | secrets: inherit 20 | 21 | dependency-tests: 22 | uses: './.github/workflows/testing-dependency.yaml' 23 | secrets: inherit 24 | needs: 25 | - create-project 26 | with: 27 | encrypted_project_api_key: ${{ needs.create-project.outputs.encrypted_project_api_key }} 28 | 29 | cleanup-project: 30 | if: ${{ always() }} 31 | needs: 32 | - dependency-tests 33 | - create-project 34 | uses: './.github/workflows/project-cleanup.yaml' 35 | secrets: inherit 36 | with: 37 | project_id: ${{ needs.create-project.outputs.project_id }} 38 | encrypted_project_api_key: ${{ needs.create-project.outputs.encrypted_project_api_key }} 39 | -------------------------------------------------------------------------------- /.github/workflows/on-pr.yaml: -------------------------------------------------------------------------------- 1 | name: Testing (PR) 2 | 3 | on: 4 | pull_request: 5 | paths-ignore: 6 | - 'docs/**' 7 | - '*.md' 8 | - '*.rst' 9 | - '*.txt' 10 | - '*.html' 11 | - '*.css' 12 | - '*.js' 13 | - '*.png' 14 | - '*.jpg' 15 | - '*.jpeg' 16 | - '*.gif' 17 | - '*.svg' 18 | - '*.example' 19 | workflow_dispatch: {} 20 | 21 | permissions: {} 22 | 23 | concurrency: 24 | 
group: 'ci-${{ github.workflow }}-${{ github.ref }}' 25 | cancel-in-progress: true 26 | 27 | jobs: 28 | linting: 29 | uses: './.github/workflows/testing-lint.yaml' 30 | 31 | unit-tests: 32 | uses: './.github/workflows/testing-unit.yaml' 33 | secrets: inherit 34 | with: 35 | python_versions_json: '["3.9"]' 36 | 37 | create-project: 38 | uses: './.github/workflows/project-setup.yaml' 39 | secrets: inherit 40 | needs: 41 | - unit-tests 42 | 43 | integration-tests: 44 | uses: './.github/workflows/testing-integration.yaml' 45 | secrets: inherit 46 | needs: 47 | - unit-tests 48 | - create-project 49 | with: 50 | encrypted_project_api_key: ${{ needs.create-project.outputs.encrypted_project_api_key }} 51 | python_versions_json: '["3.9"]' 52 | 53 | cleanup-project: 54 | if: ${{ always() }} 55 | needs: 56 | - create-project 57 | - integration-tests 58 | uses: './.github/workflows/project-cleanup.yaml' 59 | secrets: inherit 60 | with: 61 | project_id: ${{ needs.create-project.outputs.project_id }} 62 | encrypted_project_api_key: ${{ needs.create-project.outputs.encrypted_project_api_key }} 63 | -------------------------------------------------------------------------------- /.github/workflows/project-cleanup.yaml: -------------------------------------------------------------------------------- 1 | name: Project Cleanup 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | encrypted_project_api_key: 7 | type: string 8 | description: 'The encrypted project api key' 9 | project_id: 10 | type: string 11 | description: 'The project id' 12 | 13 | permissions: {} 14 | 15 | jobs: 16 | cleanup-project: 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 30 19 | steps: 20 | - uses: actions/checkout@v4 21 | - uses: ./.github/actions/setup-poetry 22 | with: 23 | python_version: 3.9 24 | - uses: ./.github/actions/project-delete 25 | with: 26 | FERNET_ENCRYPTION_KEY: '${{ secrets.FERNET_ENCRYPTION_KEY }}' 27 | PINECONE_SERVICE_ACCOUNT_CLIENT_ID: '${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }}' 
28 | PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET: '${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET }}' 29 | api_version: '2025-04' 30 | project_id: ${{ inputs.project_id }} 31 | encrypted_project_api_key: ${{ inputs.encrypted_project_api_key }} 32 | -------------------------------------------------------------------------------- /.github/workflows/project-setup.yaml: -------------------------------------------------------------------------------- 1 | name: Project Setup 2 | 3 | on: 4 | workflow_call: 5 | outputs: 6 | encrypted_project_api_key: 7 | description: 'The encrypted project api key' 8 | value: ${{ jobs.create-project-job.outputs.encrypted_project_api_key }} 9 | project_id: 10 | description: 'The project id' 11 | value: ${{ jobs.create-project-job.outputs.project_id }} 12 | 13 | permissions: {} 14 | 15 | jobs: 16 | create-project-job: 17 | runs-on: ubuntu-latest 18 | outputs: 19 | encrypted_project_api_key: ${{ steps.create-project-step.outputs.encrypted_project_api_key }} 20 | project_id: ${{ steps.create-project-step.outputs.project_id }} 21 | steps: 22 | - uses: actions/checkout@v4 23 | - uses: ./.github/actions/setup-poetry 24 | with: 25 | python_version: 3.9 26 | - uses: ./.github/actions/project-create 27 | id: create-project-step 28 | with: 29 | PINECONE_SERVICE_ACCOUNT_CLIENT_ID: '${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }}' 30 | PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET: '${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET }}' 31 | FERNET_ENCRYPTION_KEY: '${{ secrets.FERNET_ENCRYPTION_KEY }}' 32 | api_version: '2025-04' 33 | name_prefix: 'python' 34 | max_pods: 10 35 | -------------------------------------------------------------------------------- /.github/workflows/release-dev.yaml: -------------------------------------------------------------------------------- 1 | name: 'PyPI Release: Pre-Release (pinecone)' 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | ref: 7 | description: 'Git ref to build (branch name or SHA)' 8 | required: true 9 | 
type: string 10 | default: 'main' 11 | releaseLevel: 12 | description: 'Release level' 13 | required: true 14 | type: choice 15 | default: 'patch' 16 | options: 17 | - 'patch' # bug fixes 18 | - 'minor' # new features, backwards compatible 19 | - 'major' # breaking changes 20 | prereleaseSuffix: 21 | description: 'Suffix to add onto the new version number in order to mark it as a prerelease' 22 | required: true 23 | type: string 24 | default: 'rc1' 25 | 26 | permissions: 27 | contents: write 28 | jobs: 29 | pypi: 30 | uses: './.github/workflows/publish-to-pypi.yaml' 31 | with: 32 | isPrerelease: true 33 | ref: ${{ inputs.ref }} 34 | releaseLevel: ${{ inputs.releaseLevel }} 35 | prereleaseSuffix: ${{ inputs.prereleaseSuffix }} 36 | TWINE_REPOSITORY: 'pypi' 37 | secrets: 38 | PYPI_USERNAME: __token__ 39 | PYPI_PASSWORD: ${{ secrets.PROD_PYPI_PUBLISH_TOKEN }} 40 | -------------------------------------------------------------------------------- /.github/workflows/release-prod.yaml: -------------------------------------------------------------------------------- 1 | name: 'PyPI Release: Production (pinecone)' 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | ref: 7 | description: 'Git ref to build (branch name or SHA)' 8 | required: true 9 | type: string 10 | default: 'main' 11 | releaseLevel: 12 | description: 'Release level' 13 | required: true 14 | type: choice 15 | default: 'patch' 16 | options: 17 | - 'patch' # bug fixes 18 | - 'minor' # new features, backwards compatible 19 | - 'major' # breaking changes 20 | 21 | permissions: 22 | contents: write 23 | 24 | jobs: 25 | unit-tests: 26 | uses: './.github/workflows/testing-unit.yaml' 27 | secrets: inherit 28 | integration-tests: 29 | uses: './.github/workflows/testing-integration.yaml' 30 | secrets: inherit 31 | dependency-tests: 32 | uses: './.github/workflows/testing-dependency.yaml' 33 | secrets: inherit 34 | needs: unit-tests 35 | install-tests: 36 | uses: './.github/workflows/testing-install.yaml' 37 | secrets: 
inherit 38 | 39 | pypi: 40 | uses: './.github/workflows/publish-to-pypi.yaml' 41 | needs: 42 | - unit-tests 43 | - integration-tests 44 | - dependency-tests 45 | - install-tests 46 | with: 47 | isPrerelease: false 48 | ref: ${{ inputs.ref }} 49 | releaseLevel: ${{ inputs.releaseLevel }} 50 | TWINE_REPOSITORY: 'pypi' 51 | prereleaseSuffix: '' 52 | secrets: 53 | PYPI_USERNAME: __token__ 54 | PYPI_PASSWORD: ${{ secrets.PROD_PYPI_PUBLISH_TOKEN }} 55 | 56 | docs-publish: 57 | uses: './.github/workflows/build-and-publish-docs.yaml' 58 | secrets: inherit 59 | needs: 60 | - pypi 61 | -------------------------------------------------------------------------------- /.github/workflows/testing-dependency-asyncio.yaml: -------------------------------------------------------------------------------- 1 | name: Dependency Testing (Asyncio) 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | index_name: 7 | required: true 8 | type: string 9 | encrypted_project_api_key: 10 | required: true 11 | type: string 12 | 13 | jobs: 14 | dependency-matrix-asyncio-rest: 15 | name: Deps (Asyncio REST) 16 | runs-on: ubuntu-latest 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | python_version: 21 | - 3.9 22 | - 3.13 23 | aiohttp_version: 24 | - 3.9.0 25 | - 3.11.5 26 | steps: 27 | - uses: actions/checkout@v4 28 | - uses: ./.github/actions/secret-decrypt 29 | id: decrypt-secret 30 | with: 31 | encryption_key: '${{ secrets.FERNET_ENCRYPTION_KEY }}' 32 | encrypted_secret: ${{ inputs.encrypted_project_api_key }} 33 | - uses: ./.github/actions/test-dependency-asyncio-rest 34 | with: 35 | python_version: '${{ matrix.python_version }}' 36 | index_name: '${{ inputs.index_name }}' 37 | PINECONE_API_KEY: '${{ steps.decrypt-secret.outputs.decrypted_secret }}' 38 | PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' 39 | aiohttp_version: '${{ matrix.aiohttp_version }}' 40 | -------------------------------------------------------------------------------- 
/.github/workflows/testing-lint.yaml: -------------------------------------------------------------------------------- 1 | name: "Lint" 2 | on: 3 | workflow_call: {} 4 | 5 | permissions: {} 6 | 7 | jobs: 8 | lint: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - uses: chartboost/ruff-action@v1 13 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "codegen/apis"] 2 | path = codegen/apis 3 | url = git@github.com:pinecone-io/apis.git 4 | [submodule "codegen/python-oas-templates"] 5 | path = codegen/python-oas-templates 6 | url = git@github.com:pinecone-io/python-oas-templates.git 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v3.2.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-yaml 8 | - id: check-added-large-files 9 | - repo: https://github.com/astral-sh/ruff-pre-commit 10 | # Ruff version. 11 | rev: v0.6.7 12 | hooks: 13 | # Run the linter. 14 | - id: ruff 15 | args: [ --fix ] 16 | # Run the formatter. 
17 | - id: ruff-format 18 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE.txt pinecone/__version__ pinecone/__environment__ 2 | recursive-exclude tests * 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: image develop tests tag-and-push docs version package upload upload-spruce license 2 | mkfile_path := $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) 3 | 4 | PYPI_USERNAME ?= __token__ 5 | 6 | image: 7 | MODULE=pinecone ../scripts/build.sh ./ 8 | 9 | develop: 10 | poetry install -E grpc 11 | 12 | test-unit: 13 | @echo "Running tests..." 14 | poetry run pytest --cov=pinecone --timeout=120 tests/unit -s -vv 15 | 16 | test-integration: 17 | @echo "Running integration tests..." 18 | PINECONE_ENVIRONMENT="us-east4-gcp" SPEC='{"serverless": {"cloud": "aws", "region": "us-east-1" }}' DIMENSION=2 METRIC='cosine' GITHUB_BUILD_NUMBER='local' poetry run pytest tests/integration 19 | 20 | test-grpc-unit: 21 | @echo "Running tests..." 
22 | poetry run pytest --cov=pinecone --timeout=120 tests/unit_grpc 23 | 24 | type-check: 25 | poetry run mypy pinecone --exclude pinecone/core 26 | 27 | generate-oas: 28 | ./codegen/build-oas.sh "2024-07" 29 | 30 | version: 31 | poetry version 32 | 33 | package: 34 | poetry build 35 | 36 | upload: 37 | poetry publish --verbose --username ${PYPI_USERNAME} --password ${PYPI_PASSWORD} 38 | -------------------------------------------------------------------------------- /codegen/buf.gen.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | plugins: 3 | - plugin: buf.build/grpc/python:v1.59.0 4 | out: gen 5 | - plugin: buf.build/protocolbuffers/python:v29.0 6 | out: gen 7 | - plugin: buf.build/protocolbuffers/pyi:v29.1 8 | out: gen 9 | -------------------------------------------------------------------------------- /codegen/buf.lock: -------------------------------------------------------------------------------- 1 | # Generated by buf. DO NOT EDIT.
2 | version: v2 3 | deps: 4 | - name: buf.build/googleapis/googleapis 5 | commit: acd896313c55464b993332136ded1b6e 6 | digest: b5:025d83e25193feb8dac5e5576113c8737006218b3b09fbc0d0ff652614da5424b336edb15bea139eb90d14eba656774a979d1fbdae81cbab2013932b84b98f53 7 | -------------------------------------------------------------------------------- /codegen/buf.yaml: -------------------------------------------------------------------------------- 1 | # For details on buf.yaml configuration, visit https://buf.build/docs/configuration/v2/buf-yaml 2 | version: v2 3 | lint: 4 | use: 5 | - STANDARD 6 | breaking: 7 | use: 8 | - FILE 9 | deps: 10 | - buf.build/googleapis/googleapis 11 | modules: 12 | - path: apis/_build/2025-01 13 | -------------------------------------------------------------------------------- /codegen/build-grpc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux -o pipefail 4 | version=$1 # e.g. 2024-07 5 | 6 | update_apis_repo() { 7 | echo "Updating apis repo" 8 | pushd codegen/apis 9 | git fetch 10 | git checkout main 11 | git pull 12 | just build 13 | popd 14 | } 15 | 16 | update_buf_config() { 17 | pushd codegen 18 | # Update buf config to find correct proto version 19 | sed -i '' "s/[0-9][0-9][0-9][0-9]-[0-1][0-9]/${version}/g" buf.yaml 20 | 21 | # Clean before building 22 | rm -rf gen 23 | 24 | # Ensure path valid by running the buf build command 25 | buf build 26 | popd 27 | } 28 | 29 | buf_generate() { 30 | pushd codegen 31 | # Generate the python code 32 | buf generate 33 | popd 34 | } 35 | 36 | update_apis_repo 37 | update_buf_config 38 | buf_generate 39 | 40 | dest="pinecone/core/grpc/protos/" 41 | 42 | # Remove existing files in dest 43 | rm -rf "${dest}*.py" 44 | rm -rf "${dest}*.pyi" 45 | 46 | find codegen/gen/ -name "*.py" | while IFS= read -r file; do 47 | sed -i '' "s/^import db_data/import pinecone.core.grpc.protos.db_data/g" "${file}" 48 | done 49 | 50 | # Copy the new 
generated files to dest directory 51 | cp codegen/gen/* ${dest} 52 | 53 | # Cleanup the intermediate files that were generated 54 | rm -rf codegen/gen 55 | -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | body, 2 | div.body h1, 3 | div.body h2, 4 | div.body h3, 5 | div.body h4, 6 | div.body h5, 7 | div.body h6, 8 | div.admonition p.admonition-title { 9 | font-family: "Inter", "Helvetica Neue", "Helvetica", "Arial", sans-serif; 10 | } 11 | 12 | .blurb { 13 | font-size: 16px; 14 | } 15 | 16 | p.admonition-title:after { 17 | content: ""; 18 | } 19 | 20 | div.code-block-caption { 21 | background-color: #EEE; 22 | border-bottom: 1px solid #CCC; 23 | font-size: 17px; 24 | padding: 10px; 25 | } 26 | 27 | dt:target { 28 | background-color: #E8E8E8; 29 | } 30 | 31 | .highlight { 32 | background-color: #F8F8F8; 33 | } 34 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import pinecone 2 | 3 | project = "Pinecone Python SDK" 4 | author = "Pinecone Systems, Inc." 5 | version = pinecone.__version__ 6 | copyright = "%Y, Pinecone Systems, Inc." 
7 | 8 | html_baseurl = "https://sdk.pinecone.io/python" 9 | html_static_path = ["_static"] 10 | html_favicon = "favicon-32x32.png" 11 | 12 | extensions = [ 13 | "sphinx.ext.autodoc", 14 | "sphinx.ext.viewcode", 15 | "sphinx.ext.todo", 16 | "sphinx.ext.napoleon", 17 | "sphinx.ext.coverage", 18 | "sphinx.ext.autodoc.typehints", 19 | "myst_parser", 20 | ] 21 | 22 | # -- HTML Configuration ------------------------------------------------- 23 | 24 | html_theme = "alabaster" 25 | html_theme_options = { 26 | "logo": "pinecone-logo.svg", 27 | "description": "Pinecone Python SDK", 28 | "github_user": "pinecone-io", 29 | "github_repo": "pinecone-python-client", 30 | "github_button": True, 31 | "fixed_sidebar": True, 32 | "page_width": "1140px", 33 | "sidebar_width": "300px", 34 | "show_related": False, 35 | "show_powered_by": False, 36 | "extra_nav_links": { 37 | "Github Source": "https://github.com/pinecone-io/pinecone-python-client", 38 | "Pinecone Home": "https://pinecone.io", 39 | "Pinecone Docs": "https://docs.pinecone.io", 40 | "Pinecone Console": "https://app.pinecone.io", 41 | }, 42 | } 43 | -------------------------------------------------------------------------------- /docs/db_control/collections.md: -------------------------------------------------------------------------------- 1 | 2 | # Collections 3 | 4 | For general information on collections, please see [Understanding Collections](https://docs.pinecone.io/guides/indexes/pods/understanding-collections) 5 | 6 | Collections are archived copy of the records stored in a pod-based index. Records in a collection cannot be directly queried or modified. 
7 | Some use-cases for collections are: 8 | 9 | - Creating multiple indexes from the same data in order to experiment with different index configurations 10 | - Making a backup of your data 11 | - Temporarily shutting down an index 12 | 13 | ## Create collection 14 | 15 | The following example creates the collection `example-collection` from a pod index named `example-index`. 16 | 17 | ```python 18 | from pinecone import Pinecone 19 | 20 | pc = Pinecone(api_key='<>') 21 | 22 | pc.create_collection( 23 | name="example-collection", 24 | source="example-index" 25 | ) 26 | ``` 27 | 28 | ## List collections 29 | 30 | The following example returns a list of the collections in the current project. 31 | 32 | ```python 33 | from pinecone import Pinecone 34 | 35 | pc = Pinecone(api_key='<>') 36 | 37 | active_collections = pc.list_collections() 38 | ``` 39 | 40 | ## Describe a collection 41 | 42 | The following example returns a description of the collection 43 | `example-collection`. 44 | 45 | ```python 46 | from pinecone import Pinecone 47 | 48 | pc = Pinecone(api_key='<>') 49 | 50 | collection_description = pc.describe_collection("example-collection") 51 | ``` 52 | 53 | ## Delete a collection 54 | 55 | The following example deletes the collection `example-collection`. 
56 | 57 | ```python 58 | from pinecone import Pinecone 59 | 60 | pc = Pinecone(api_key='<>') 61 | 62 | pc.delete_collection("example-collection") 63 | ``` 64 | 65 | ## Creating an index from a collection 66 | -------------------------------------------------------------------------------- /docs/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/docs/favicon-32x32.png -------------------------------------------------------------------------------- /docs/inference/inference-api.md: -------------------------------------------------------------------------------- 1 | # Inference API 2 | 3 | The Pinecone SDK now supports creating embeddings via the [Inference API](https://docs.pinecone.io/guides/inference/understanding-inference). 4 | 5 | ```python 6 | from pinecone import Pinecone, EmbedModel 7 | 8 | pc = Pinecone(api_key="YOUR_API_KEY") 9 | 10 | # Embed documents 11 | text = [ 12 | "Turkey is a classic meat to eat at American Thanksgiving.", 13 | "Many people enjoy the beautiful mosques in Turkey.", 14 | ] 15 | text_embeddings = pc.inference.embed( 16 | model=EmbedModel.Multilingual_E5_Large, 17 | inputs=text, 18 | parameters={ 19 | "input_type": "passage", 20 | "truncate": "END" 21 | }, 22 | ) 23 | 24 | # Upsert documents into Pinecone index 25 | 26 | # Embed a query 27 | query = ["How should I prepare my turkey?"] 28 | query_embeddings = pc.inference.embed( 29 | model=model, 30 | inputs=query, 31 | parameters={ 32 | "input_type": "query", 33 | "truncate": "END" 34 | }, 35 | ) 36 | 37 | # Send query to Pinecone index to retrieve similar documents 38 | ``` 39 | -------------------------------------------------------------------------------- /docs/maintainers/release-workflow.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/docs/maintainers/release-workflow.png -------------------------------------------------------------------------------- /docs/working-with-indexes.rst: -------------------------------------------------------------------------------- 1 | Indexes 2 | ======= 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | db_control/serverless-indexes 8 | db_control/pod-indexes 9 | db_control/shared-index-actions 10 | db_control/shared-index-configs 11 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ; pretty = True 3 | ; disallow_untyped_calls = True 4 | ; check_untyped_defs = True 5 | ; disallow_untyped_defs = True 6 | ; warn_return_any = True 7 | ; warn_unused_configs = True 8 | 9 | # Per-module options: 10 | 11 | ; [mypy-mycode.foo.*] 12 | ; disallow_untyped_defs = True 13 | 14 | [mypy-google.api.*] 15 | ignore_missing_imports = True 16 | -------------------------------------------------------------------------------- /pinecone/__version__: -------------------------------------------------------------------------------- 1 | 7.0.2 -------------------------------------------------------------------------------- /pinecone/config/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | from .config import ConfigBuilder, Config 5 | from .openapi_configuration import Configuration as OpenApiConfiguration 6 | from .pinecone_config import PineconeConfig 7 | 8 | __all__ = [ 9 | "ConfigBuilder", 10 | "Config", 11 | "OpenApiConfiguration", 12 | "PineconeConfig", 13 | ] 14 | 15 | if os.getenv("PINECONE_DEBUG") is not None: 16 | logging.getLogger("pinecone").setLevel(level=logging.DEBUG) 17 | -------------------------------------------------------------------------------- 
from typing import Optional, Dict
import logging
import json
import os
from .config import ConfigBuilder, Config

logger = logging.getLogger(__name__)
""" :meta private: """

DEFAULT_CONTROLLER_HOST = "https://api.pinecone.io"


class PineconeConfig:
    """Assembles a :class:`Config` for the Pinecone control plane.

    Host resolution order: explicit ``host`` argument, ``host`` inside
    ``**kwargs``, the ``PINECONE_CONTROLLER_HOST`` environment variable,
    then ``DEFAULT_CONTROLLER_HOST``.
    """

    @staticmethod
    def build(
        api_key: Optional[str] = None,
        host: Optional[str] = None,
        additional_headers: Optional[Dict[str, str]] = None,
        **kwargs,
    ) -> Config:
        """Build a :class:`Config` from explicit arguments and environment.

        :param api_key: API key; forwarded to :meth:`ConfigBuilder.build`.
        :param host: Controller host override; see resolution order above.
        :param additional_headers: Extra HTTP headers. If omitted, headers may
            be picked up from the ``PINECONE_ADDITIONAL_HEADERS`` env var
            (a JSON object); caller-supplied headers always win.
        :param kwargs: Forwarded verbatim to :meth:`ConfigBuilder.build`.
        :return: The assembled :class:`Config`.
        """
        # pop() rather than get(): leaving "host" inside kwargs would make the
        # ConfigBuilder.build(host=host, **kwargs) call below raise
        # "TypeError: got multiple values for keyword argument 'host'".
        host = (
            host
            or kwargs.pop("host", None)
            or os.getenv("PINECONE_CONTROLLER_HOST")
            or DEFAULT_CONTROLLER_HOST
        )

        # None default (instead of a mutable {} default) to avoid the shared
        # mutable-default-argument pitfall; normalize to {} to preserve the
        # previous effective default.
        if additional_headers is None:
            additional_headers = {}

        headers_json = os.getenv("PINECONE_ADDITIONAL_HEADERS")
        if headers_json:
            try:
                headers = json.loads(headers_json)
                # Env headers are only a fallback; explicit headers win.
                additional_headers = additional_headers or headers
            except Exception as e:
                # logger.warning, not the deprecated logger.warn alias.
                logger.warning(f"Ignoring PINECONE_ADDITIONAL_HEADERS: {e}")

        return ConfigBuilder.build(
            api_key=api_key, host=host, additional_headers=additional_headers, **kwargs
        )
" 7 | "This warning will become an error in a future version of the Pinecone Python SDK.", 8 | DeprecationWarning, 9 | ) 10 | -------------------------------------------------------------------------------- /pinecone/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/pinecone/core/__init__.py -------------------------------------------------------------------------------- /pinecone/core/openapi/db_control/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | Pinecone Control Plane API 5 | 6 | Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 7 | 8 | This file is @generated using OpenAPI. 9 | 10 | The version of the OpenAPI document: 2025-04 11 | Contact: support@pinecone.io 12 | """ 13 | 14 | __version__ = "1.0.0" 15 | 16 | # import ApiClient 17 | from pinecone.openapi_support.api_client import ApiClient 18 | 19 | # import Configuration 20 | from pinecone.config.openapi_configuration import Configuration 21 | 22 | # import exceptions 23 | from pinecone.openapi_support.exceptions import PineconeException 24 | from pinecone.openapi_support.exceptions import PineconeApiAttributeError 25 | from pinecone.openapi_support.exceptions import PineconeApiTypeError 26 | from pinecone.openapi_support.exceptions import PineconeApiValueError 27 | from pinecone.openapi_support.exceptions import PineconeApiKeyError 28 | from pinecone.openapi_support.exceptions import PineconeApiException 29 | 30 | API_VERSION = "2025-04" 31 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_control/api/__init__.py: -------------------------------------------------------------------------------- 1 | # do not import all apis into this module 
because that uses a lot of memory and stack frames 2 | # if you need the ability to import all apis from one package, import them with 3 | # from pinecone.core.openapi.db_control.apis import ManageIndexesApi 4 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_control/apis/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | # Import all APIs into this package. 4 | # If you have many APIs here with many many models used in each API this may 5 | # raise a `RecursionError`. 6 | # In order to avoid this, import only the API that you directly need like: 7 | # 8 | # from .api.manage_indexes_api import ManageIndexesApi 9 | # 10 | # or import this package, but before doing it, use: 11 | # 12 | # import sys 13 | # sys.setrecursionlimit(n) 14 | 15 | # Import APIs into API package: 16 | from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi 17 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_control/model/__init__.py: -------------------------------------------------------------------------------- 1 | # we can not import model classes here because that would create a circular 2 | # reference which would not work in python2 3 | # do not import all models into this module because that uses a lot of memory and stack frames 4 | # if you need the ability to import all models from one package, import them with 5 | # from {{packageName}.models import ModelA, ModelB 6 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_data/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | Pinecone Data Plane API 5 | 6 | Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. 
# noqa: E501 7 | 8 | This file is @generated using OpenAPI. 9 | 10 | The version of the OpenAPI document: 2025-04 11 | Contact: support@pinecone.io 12 | """ 13 | 14 | __version__ = "1.0.0" 15 | 16 | # import ApiClient 17 | from pinecone.openapi_support.api_client import ApiClient 18 | 19 | # import Configuration 20 | from pinecone.config.openapi_configuration import Configuration 21 | 22 | # import exceptions 23 | from pinecone.openapi_support.exceptions import PineconeException 24 | from pinecone.openapi_support.exceptions import PineconeApiAttributeError 25 | from pinecone.openapi_support.exceptions import PineconeApiTypeError 26 | from pinecone.openapi_support.exceptions import PineconeApiValueError 27 | from pinecone.openapi_support.exceptions import PineconeApiKeyError 28 | from pinecone.openapi_support.exceptions import PineconeApiException 29 | 30 | API_VERSION = "2025-04" 31 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_data/api/__init__.py: -------------------------------------------------------------------------------- 1 | # do not import all apis into this module because that uses a lot of memory and stack frames 2 | # if you need the ability to import all apis from one package, import them with 3 | # from pinecone.core.openapi.db_data.apis import BulkOperationsApi 4 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_data/apis/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | # Import all APIs into this package. 4 | # If you have many APIs here with many many models used in each API this may 5 | # raise a `RecursionError`. 
6 | # In order to avoid this, import only the API that you directly need like: 7 | # 8 | # from .api.bulk_operations_api import BulkOperationsApi 9 | # 10 | # or import this package, but before doing it, use: 11 | # 12 | # import sys 13 | # sys.setrecursionlimit(n) 14 | 15 | # Import APIs into API package: 16 | from pinecone.core.openapi.db_data.api.bulk_operations_api import BulkOperationsApi 17 | from pinecone.core.openapi.db_data.api.namespace_operations_api import NamespaceOperationsApi 18 | from pinecone.core.openapi.db_data.api.vector_operations_api import VectorOperationsApi 19 | -------------------------------------------------------------------------------- /pinecone/core/openapi/db_data/model/__init__.py: -------------------------------------------------------------------------------- 1 | # we can not import model classes here because that would create a circular 2 | # reference which would not work in python2 3 | # do not import all models into this module because that uses a lot of memory and stack frames 4 | # if you need the ability to import all models from one package, import them with 5 | # from {{packageName}.models import ModelA, ModelB 6 | -------------------------------------------------------------------------------- /pinecone/core/openapi/inference/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | Pinecone Inference API 5 | 6 | Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 7 | 8 | This file is @generated using OpenAPI. 
9 | 10 | The version of the OpenAPI document: 2025-04 11 | Contact: support@pinecone.io 12 | """ 13 | 14 | __version__ = "1.0.0" 15 | 16 | # import ApiClient 17 | from pinecone.openapi_support.api_client import ApiClient 18 | 19 | # import Configuration 20 | from pinecone.config.openapi_configuration import Configuration 21 | 22 | # import exceptions 23 | from pinecone.openapi_support.exceptions import PineconeException 24 | from pinecone.openapi_support.exceptions import PineconeApiAttributeError 25 | from pinecone.openapi_support.exceptions import PineconeApiTypeError 26 | from pinecone.openapi_support.exceptions import PineconeApiValueError 27 | from pinecone.openapi_support.exceptions import PineconeApiKeyError 28 | from pinecone.openapi_support.exceptions import PineconeApiException 29 | 30 | API_VERSION = "2025-04" 31 | -------------------------------------------------------------------------------- /pinecone/core/openapi/inference/api/__init__.py: -------------------------------------------------------------------------------- 1 | # do not import all apis into this module because that uses a lot of memory and stack frames 2 | # if you need the ability to import all apis from one package, import them with 3 | # from pinecone.core.openapi.inference.apis import InferenceApi 4 | -------------------------------------------------------------------------------- /pinecone/core/openapi/inference/apis/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | # Import all APIs into this package. 4 | # If you have many APIs here with many many models used in each API this may 5 | # raise a `RecursionError`. 
6 | # In order to avoid this, import only the API that you directly need like: 7 | # 8 | # from .api.inference_api import InferenceApi 9 | # 10 | # or import this package, but before doing it, use: 11 | # 12 | # import sys 13 | # sys.setrecursionlimit(n) 14 | 15 | # Import APIs into API package: 16 | from pinecone.core.openapi.inference.api.inference_api import InferenceApi 17 | -------------------------------------------------------------------------------- /pinecone/core/openapi/inference/model/__init__.py: -------------------------------------------------------------------------------- 1 | # we can not import model classes here because that would create a circular 2 | # reference which would not work in python2 3 | # do not import all models into this module because that uses a lot of memory and stack frames 4 | # if you need the ability to import all models from one package, import them with 5 | # from {{packageName}.models import ModelA, ModelB 6 | -------------------------------------------------------------------------------- /pinecone/data/__init__.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | warnings.warn( 4 | "The module at `pinecone.data` has moved to `pinecone.db_data`. " 5 | "Please update your imports. " 6 | "This warning will become an error in a future version of the Pinecone Python SDK.", 7 | DeprecationWarning, 8 | ) 9 | -------------------------------------------------------------------------------- /pinecone/data/features/__init__.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from .bulk_imports import * 4 | from .inference import * 5 | 6 | 7 | warnings.warn( 8 | "The module at `pinecone.data.features` has been removed. Code has been refactored and integrated into other parts of the client. 
" 9 | "This warning will become an error in a future version of the Pinecone Python SDK.", 10 | DeprecationWarning, 11 | ) 12 | -------------------------------------------------------------------------------- /pinecone/data/features/bulk_imports/__init__.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from pinecone.db_data.resources.asyncio.bulk_import_asyncio import * 4 | from pinecone.db_data.resources.sync.bulk_import import * 5 | from pinecone.db_data.resources.sync.bulk_import_request_factory import * 6 | 7 | 8 | warnings.warn( 9 | "The module at `pinecone.data.features.bulk_import` has moved to `pinecone.db_data.features.bulk_import`. " 10 | "Please update your imports. " 11 | "This warning will become an error in a future version of the Pinecone Python SDK.", 12 | DeprecationWarning, 13 | ) 14 | -------------------------------------------------------------------------------- /pinecone/data/features/inference/__init__.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from pinecone.inference import * 4 | 5 | warnings.warn( 6 | "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " 7 | "Please update your imports. 
" 8 | "This warning will become an error in a future version of the Pinecone Python SDK.", 9 | DeprecationWarning, 10 | ) 11 | -------------------------------------------------------------------------------- /pinecone/db_control/__init__.py: -------------------------------------------------------------------------------- 1 | from .enums import * 2 | from .models import * 3 | from .db_control import DBControl 4 | from .db_control_asyncio import DBControlAsyncio 5 | from .repr_overrides import install_repr_overrides 6 | 7 | __all__ = [ 8 | # from .enums 9 | "CloudProvider", 10 | "AwsRegion", 11 | "GcpRegion", 12 | "AzureRegion", 13 | "DeletionProtection", 14 | "Metric", 15 | "PodIndexEnvironment", 16 | "PodType", 17 | "VectorType", 18 | # from .models 19 | "CollectionDescription", 20 | "PodSpec", 21 | "PodSpecDefinition", 22 | "ServerlessSpec", 23 | "ServerlessSpecDefinition", 24 | "ByocSpec", 25 | "IndexList", 26 | "CollectionList", 27 | "IndexModel", 28 | "IndexEmbed", 29 | "BackupModel", 30 | "BackupList", 31 | "RestoreJobModel", 32 | "RestoreJobList", 33 | # direct imports 34 | "DBControl", 35 | "DBControlAsyncio", 36 | ] 37 | 38 | install_repr_overrides() 39 | -------------------------------------------------------------------------------- /pinecone/db_control/enums/__init__.py: -------------------------------------------------------------------------------- 1 | from .clouds import CloudProvider, AwsRegion, GcpRegion, AzureRegion 2 | from .deletion_protection import DeletionProtection 3 | from .metric import Metric 4 | from .pod_index_environment import PodIndexEnvironment 5 | from .pod_type import PodType 6 | from .vector_type import VectorType 7 | 8 | __all__ = [ 9 | "CloudProvider", 10 | "AwsRegion", 11 | "GcpRegion", 12 | "AzureRegion", 13 | "DeletionProtection", 14 | "Metric", 15 | "PodIndexEnvironment", 16 | "PodType", 17 | "VectorType", 18 | ] 19 | -------------------------------------------------------------------------------- 
/pinecone/db_control/enums/deletion_protection.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DeletionProtection(Enum): 5 | """The DeletionProtection setting of an index indicates whether the index 6 | can be the index cannot be deleted using the delete_index() method. 7 | 8 | If disabled, the index can be deleted. If enabled, calling delete_index() 9 | will raise an error. 10 | 11 | This setting can be changed using the configure_index() method. 12 | """ 13 | 14 | ENABLED = "enabled" 15 | DISABLED = "disabled" 16 | -------------------------------------------------------------------------------- /pinecone/db_control/enums/metric.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class Metric(Enum): 5 | """ 6 | The metric specifies how Pinecone should calculate the distance between vectors when querying an index. 7 | """ 8 | 9 | COSINE = "cosine" 10 | EUCLIDEAN = "euclidean" 11 | DOTPRODUCT = "dotproduct" 12 | -------------------------------------------------------------------------------- /pinecone/db_control/enums/pod_index_environment.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class PodIndexEnvironment(Enum): 5 | """ 6 | These environment strings are used to specify where a pod index should be deployed. 
7 | """ 8 | 9 | US_WEST1_GCP = "us-west1-gcp" 10 | US_CENTRAL1_GCP = "us-central1-gcp" 11 | US_WEST4_GCP = "us-west4-gcp" 12 | US_EAST4_GCP = "us-east4-gcp" 13 | NORTHAMERICA_NORTHEAST1_GCP = "northamerica-northeast1-gcp" 14 | ASIA_NORTHEAST1_GCP = "asia-northeast1-gcp" 15 | ASIA_SOUTHEAST1_GCP = "asia-southeast1-gcp" 16 | US_EAST1_GCP = "us-east1-gcp" 17 | EU_WEST1_GCP = "eu-west1-gcp" 18 | EU_WEST4_GCP = "eu-west4-gcp" 19 | US_EAST1_AWS = "us-east-1-aws" 20 | EASTUS_AZURE = "eastus-azure" 21 | -------------------------------------------------------------------------------- /pinecone/db_control/enums/pod_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class PodType(Enum): 5 | """ 6 | PodType represents the available pod types for a pod index. 7 | """ 8 | 9 | P1_X1 = "p1.x1" 10 | P1_X2 = "p1.x2" 11 | P1_X4 = "p1.x4" 12 | P1_X8 = "p1.x8" 13 | S1_X1 = "s1.x1" 14 | S1_X2 = "s1.x2" 15 | S1_X4 = "s1.x4" 16 | S1_X8 = "s1.x8" 17 | P2_X1 = "p2.x1" 18 | P2_X2 = "p2.x2" 19 | P2_X4 = "p2.x4" 20 | P2_X8 = "p2.x8" 21 | -------------------------------------------------------------------------------- /pinecone/db_control/enums/vector_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class VectorType(Enum): 5 | """ 6 | VectorType is used to specifiy the type of vector you will store in the index. 7 | 8 | Dense vectors are used to store dense embeddings, which are vectors with non-zero values in most of the dimensions. 9 | 10 | Sparse vectors are used to store sparse embeddings, which allow vectors with zero values in most of the dimensions to be represented concisely. 
11 | """ 12 | 13 | DENSE = "dense" 14 | SPARSE = "sparse" 15 | -------------------------------------------------------------------------------- /pinecone/db_control/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .index_description import ServerlessSpecDefinition, PodSpecDefinition 2 | from .collection_description import CollectionDescription 3 | from .serverless_spec import ServerlessSpec 4 | from .pod_spec import PodSpec 5 | from .byoc_spec import ByocSpec 6 | from .index_list import IndexList 7 | from .collection_list import CollectionList 8 | from .index_model import IndexModel 9 | from ...inference.models.index_embed import IndexEmbed 10 | from .backup_model import BackupModel 11 | from .backup_list import BackupList 12 | from .restore_job_model import RestoreJobModel 13 | from .restore_job_list import RestoreJobList 14 | 15 | 16 | __all__ = [ 17 | "CollectionDescription", 18 | "PodSpec", 19 | "PodSpecDefinition", 20 | "ServerlessSpec", 21 | "ServerlessSpecDefinition", 22 | "ByocSpec", 23 | "IndexList", 24 | "CollectionList", 25 | "IndexModel", 26 | "IndexEmbed", 27 | "BackupModel", 28 | "BackupList", 29 | "RestoreJobModel", 30 | "RestoreJobList", 31 | ] 32 | -------------------------------------------------------------------------------- /pinecone/db_control/models/backup_list.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pinecone.core.openapi.db_control.model.backup_list import BackupList as OpenAPIBackupList 3 | from .backup_model import BackupModel 4 | from typing import List 5 | 6 | 7 | class BackupList: 8 | def __init__(self, backup_list: OpenAPIBackupList): 9 | self._backup_list = backup_list 10 | self._backups = [BackupModel(b) for b in self._backup_list.data] 11 | 12 | def names(self) -> List[str]: 13 | return [i.name for i in self._backups] 14 | 15 | def __getitem__(self, key): 16 | if isinstance(key, int): 17 | return 
self._backups[key] 18 | elif key == "data": 19 | return self._backups 20 | else: 21 | # pagination and any other keys added in the future 22 | return self._backup_list[key] 23 | 24 | def __getattr__(self, attr): 25 | if attr == "data": 26 | return self._backups 27 | else: 28 | # pagination and any other keys added in the future 29 | return getattr(self._backup_list, attr) 30 | 31 | def __len__(self): 32 | return len(self._backups) 33 | 34 | def __iter__(self): 35 | return iter(self._backups) 36 | 37 | def __str__(self): 38 | return str(self._backups) 39 | 40 | def __repr__(self): 41 | raw_dict = self._backup_list.to_dict() 42 | raw_dict["data"] = [i.to_dict() for i in self._backups] 43 | 44 | # Remove keys with value None 45 | for key, value in list(raw_dict.items()): 46 | if value is None: 47 | del raw_dict[key] 48 | 49 | return json.dumps(raw_dict, indent=4) 50 | -------------------------------------------------------------------------------- /pinecone/db_control/models/backup_model.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pinecone.core.openapi.db_control.model.backup_model import BackupModel as OpenAPIBackupModel 3 | from pinecone.utils.repr_overrides import custom_serializer 4 | 5 | 6 | class BackupModel: 7 | def __init__(self, backup: OpenAPIBackupModel): 8 | self._backup = backup 9 | 10 | def __getattr__(self, attr): 11 | return getattr(self._backup, attr) 12 | 13 | def __getitem__(self, key): 14 | return self.__getattr__(key) 15 | 16 | def __str__(self): 17 | return self.__repr__() 18 | 19 | def __repr__(self): 20 | return json.dumps(self.to_dict(), indent=4, default=custom_serializer) 21 | 22 | def to_dict(self): 23 | return self._backup.to_dict() 24 | -------------------------------------------------------------------------------- /pinecone/db_control/models/byoc_spec.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 
2 | 3 | 4 | @dataclass(frozen=True) 5 | class ByocSpec: 6 | """ 7 | ByocSpec represents the configuration used to deploy a BYOC (Bring Your Own Cloud) index. 8 | 9 | To learn more about the options for each configuration, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) 10 | """ 11 | 12 | environment: str 13 | -------------------------------------------------------------------------------- /pinecone/db_control/models/collection_description.py: -------------------------------------------------------------------------------- 1 | from typing import NamedTuple 2 | 3 | 4 | class CollectionDescription(NamedTuple): 5 | """ 6 | The description of a collection. 7 | """ 8 | 9 | name: str 10 | """ 11 | The name of the collection. 12 | """ 13 | 14 | source: str 15 | """ 16 | The name of the index used to create the collection. 17 | """ 18 | -------------------------------------------------------------------------------- /pinecone/db_control/models/collection_list.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pinecone.core.openapi.db_control.model.collection_list import ( 3 | CollectionList as OpenAPICollectionList, 4 | ) 5 | 6 | 7 | class CollectionList: 8 | """ 9 | A list of collections. 
10 | """ 11 | 12 | def __init__(self, collection_list: OpenAPICollectionList): 13 | self.collection_list = collection_list 14 | self.current = 0 15 | 16 | def names(self): 17 | return [i["name"] for i in self.collection_list.collections] 18 | 19 | def __getitem__(self, key): 20 | return self.collection_list.collections[key] 21 | 22 | def __len__(self): 23 | return len(self.collection_list.collections) 24 | 25 | def __iter__(self): 26 | return iter(self.collection_list.collections) 27 | 28 | def __str__(self): 29 | return str(self.collection_list) 30 | 31 | def __repr__(self): 32 | return json.dumps([c.to_dict() for c in self.collection_list.collections], indent=4) 33 | 34 | def __getattr__(self, attr): 35 | return getattr(self.collection_list, attr) 36 | -------------------------------------------------------------------------------- /pinecone/db_control/models/index_description.py: -------------------------------------------------------------------------------- 1 | from typing import NamedTuple, Dict, Optional, Literal 2 | 3 | 4 | class PodSpecDefinition(NamedTuple): 5 | replicas: int 6 | shards: int 7 | pods: int 8 | pod_type: str 9 | environment: str 10 | metadata_config: Optional[Dict] 11 | 12 | 13 | class ServerlessSpecDefinition(NamedTuple): 14 | cloud: str 15 | region: str 16 | 17 | 18 | PodKey = Literal["pod"] 19 | PodSpec = Dict[PodKey, PodSpecDefinition] 20 | 21 | ServerlessKey = Literal["serverless"] 22 | ServerlessSpec = Dict[ServerlessKey, ServerlessSpecDefinition] 23 | -------------------------------------------------------------------------------- /pinecone/db_control/models/index_list.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pinecone.core.openapi.db_control.model.index_list import IndexList as OpenAPIIndexList 3 | from .index_model import IndexModel 4 | from typing import List 5 | 6 | 7 | class IndexList: 8 | def __init__(self, index_list: OpenAPIIndexList): 9 | self.index_list = 
index_list 10 | self.indexes = [IndexModel(i) for i in self.index_list.indexes] 11 | self.current = 0 12 | 13 | def names(self) -> List[str]: 14 | return [i.name for i in self.indexes] 15 | 16 | def __getitem__(self, key): 17 | return self.indexes[key] 18 | 19 | def __len__(self): 20 | return len(self.indexes) 21 | 22 | def __iter__(self): 23 | return iter(self.indexes) 24 | 25 | def __str__(self): 26 | return str(self.indexes) 27 | 28 | def __repr__(self): 29 | return json.dumps([i.to_dict() for i in self.indexes], indent=4) 30 | 31 | def __getattr__(self, attr): 32 | return getattr(self.index_list, attr) 33 | -------------------------------------------------------------------------------- /pinecone/db_control/models/index_model.py: -------------------------------------------------------------------------------- 1 | from pinecone.core.openapi.db_control.model.index_model import IndexModel as OpenAPIIndexModel 2 | import json 3 | from pinecone.utils.repr_overrides import custom_serializer 4 | 5 | 6 | class IndexModel: 7 | def __init__(self, index: OpenAPIIndexModel): 8 | self.index = index 9 | self.deletion_protection = index.deletion_protection.value 10 | 11 | def __str__(self): 12 | return str(self.index) 13 | 14 | def __getattr__(self, attr): 15 | return getattr(self.index, attr) 16 | 17 | def __getitem__(self, key): 18 | return self.__getattr__(key) 19 | 20 | def __repr__(self): 21 | return json.dumps(self.to_dict(), indent=4, default=custom_serializer) 22 | 23 | def to_dict(self): 24 | return self.index.to_dict() 25 | -------------------------------------------------------------------------------- /pinecone/db_control/models/list_response.py: -------------------------------------------------------------------------------- 1 | from typing import NamedTuple, Optional, List 2 | 3 | 4 | class Pagination(NamedTuple): 5 | next: str 6 | 7 | 8 | class ListResponse(NamedTuple): 9 | namespace: str 10 | vectors: List 11 | pagination: Optional[Pagination] 12 | 
import json
from pinecone.core.openapi.db_control.model.restore_job_list import (
    RestoreJobList as OpenAPIRestoreJobList,
)
from .restore_job_model import RestoreJobModel

from datetime import datetime


def custom_serializer(obj):
    # json.dumps fallback: datetimes -> ISO-8601, anything else -> str().
    # NOTE(review): this duplicates pinecone.utils.repr_overrides.custom_serializer
    # used by the sibling RestoreJobModel — consider consolidating.
    if isinstance(obj, datetime):
        return obj.isoformat()
    else:
        return str(obj)


class RestoreJobList:
    """List wrapper around the OpenAPI ``RestoreJobList``.

    Each raw entry in ``data`` is wrapped once as a :class:`RestoreJobModel`;
    any other key or attribute access (e.g. ``pagination``) is delegated to
    the underlying OpenAPI object.
    """

    def __init__(self, restore_job_list: OpenAPIRestoreJobList):
        self._restore_job_list = restore_job_list
        # Wrap each raw entry up front so indexing/iteration return models.
        self._restore_jobs = [RestoreJobModel(r) for r in self._restore_job_list.data]

    def __getitem__(self, key):
        # Integer index -> wrapped job; "data" -> the whole wrapped list;
        # anything else is delegated to the OpenAPI object.
        if isinstance(key, int):
            return self._restore_jobs[key]
        elif key == "data":
            return self._restore_jobs
        else:
            # pagination and any other keys added in the future
            return self._restore_job_list[key]

    def __getattr__(self, attr):
        # Mirror __getitem__: expose wrapped jobs for .data, delegate the rest.
        if attr == "data":
            return self._restore_jobs
        else:
            # pagination and any other keys added in the future
            return getattr(self._restore_job_list, attr)

    def __len__(self):
        return len(self._restore_jobs)

    def __iter__(self):
        return iter(self._restore_jobs)

    def __str__(self):
        return str(self._restore_jobs)

    def __repr__(self):
        # Pretty-print the wrapped jobs; custom_serializer handles datetimes.
        return json.dumps(
            [i.to_dict() for i in self._restore_jobs], indent=4, default=custom_serializer
        )
@dataclass(frozen=True)
class ServerlessSpec:
    """Immutable spec for a serverless index: a cloud provider plus region.

    Accepts either enum members (``CloudProvider`` / ``AwsRegion`` /
    ``GcpRegion`` / ``AzureRegion``) or plain strings; both fields are
    stored as strings.
    """

    cloud: str
    region: str

    def __init__(
        self,
        cloud: Union[CloudProvider, str],
        region: Union[AwsRegion, GcpRegion, AzureRegion, str],
    ):
        # Normalize enum members to their underlying string value.
        normalized_cloud = cloud.value if isinstance(cloud, Enum) else str(cloud)
        normalized_region = region.value if isinstance(region, Enum) else str(region)
        # The dataclass is frozen, so fields must be assigned through
        # object.__setattr__ rather than plain attribute assignment.
        object.__setattr__(self, "cloud", normalized_cloud)
        object.__setattr__(self, "region", normalized_region)

    def asdict(self):
        """Return the spec in the nested shape expected by the index API."""
        return {"serverless": {"cloud": self.cloud, "region": self.region}}
class CollectionResourceAsyncio:
    """Asyncio resource for managing collections (create/list/describe/delete)."""

    def __init__(self, index_api):
        self.index_api = index_api
        """ :meta private: """

    @require_kwargs
    async def create(self, *, name: str, source: str):
        """Create a collection named ``name`` from the index named ``source``."""
        req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source)
        await self.index_api.create_collection(create_collection_request=req)

    @require_kwargs
    async def list(self) -> CollectionList:
        """List all collections in the project."""
        response = await self.index_api.list_collections()
        return CollectionList(response)

    @require_kwargs
    async def delete(self, *, name: str):
        """Delete the collection named ``name``."""
        await self.index_api.delete_collection(name)

    @require_kwargs
    async def describe(self, *, name: str):
        """Describe the collection named ``name`` and return it as a plain dict.

        Bug fix: ``describe_collection`` is a coroutine, so the previous
        ``await self.index_api.describe_collection(name).to_dict()`` called
        ``.to_dict()`` on the *coroutine object* (AttributeError at runtime).
        The result must be awaited first, then converted — matching the sync
        ``CollectionResource.describe`` implementation.
        """
        response = await self.index_api.describe_collection(name)
        return response.to_dict()
class CreateIndexForModelEmbedTypedDict(TypedDict):
    """Embed configuration accepted when creating an index for a hosted model.

    Describes which embedding model backs the index and how record fields
    map onto the model's inputs.
    """

    model: Union[EmbedModel, str]  # embedding model, as an EmbedModel enum member or its string name
    field_map: Dict  # maps index record fields onto the model's expected inputs
    metric: Union[Metric, str]  # similarity metric, as a Metric enum member or its string name
    read_parameters: Dict  # model parameters used for read (query) operations — TODO confirm exact semantics
    write_parameters: Dict  # model parameters used for write (upsert) operations — TODO confirm exact semantics
@dataclass
class FetchResponse:
    """Result of fetching vectors by id from a namespace."""

    namespace: str  # the namespace the vectors were fetched from
    vectors: Dict[str, Vector]  # fetched vectors, keyed by their id
    usage: Dict[str, int]  # request usage counters keyed by metric name — semantics defined by the API
@dataclass
class SearchQueryVector:
    """
    SearchQueryVector represents the vector values used to query.
    """

    values: Optional[List[float]] = None
    """
    The vector data included in the search request.
    Optional.
    """

    sparse_values: Optional[List[float]] = None
    """
    The sparse embedding values to search with.
    Optional.
    """

    sparse_indices: Optional[List[int]] = None
    """
    The sparse embedding indices to search with.
    Optional.
    """

    def as_dict(self) -> dict:
        """
        Return this vector as a plain dict, dropping any field left as None.
        """
        candidates = (
            ("values", self.values),
            ("sparse_values", self.sparse_values),
            ("sparse_indices", self.sparse_indices),
        )
        return {name: value for name, value in candidates if value is not None}
@dataclass
class Vector(DictLike):
    """A single vector record: an id plus dense and/or sparse values.

    ``DictLike`` adds ``vec["field"]`` style access to the dataclass fields.
    """

    id: str  # unique identifier of the vector
    values: List[float] = field(default_factory=list)  # dense embedding values
    metadata: Optional[VectorMetadataTypedDict] = None  # optional key/value metadata
    sparse_values: Optional[SparseValues] = None  # optional sparse representation

    def __post_init__(self):
        # A vector must carry some data: reject records with neither dense
        # nor sparse values.
        if self.sparse_values is None and len(self.values) == 0:
            raise ValueError("The values and sparse_values fields cannot both be empty")

    def to_dict(self) -> VectorTypedDict:
        """Serialize to a plain dict, omitting optional fields left unset."""
        vector_dict: VectorTypedDict = {"id": self.id, "values": self.values}
        if self.metadata is not None:
            vector_dict["metadata"] = self.metadata
        if self.sparse_values is not None:
            vector_dict["sparse_values"] = self.sparse_values.to_dict()
        return vector_dict

    @staticmethod
    def from_dict(vector_dict: VectorTypedDict) -> "Vector":
        """Build a Vector from a plain dict (inverse of ``to_dict``)."""
        passed_sparse_values = vector_dict.get("sparse_values")
        if passed_sparse_values is not None:
            parsed_sparse_values = SparseValues.from_dict(passed_sparse_values)
        else:
            parsed_sparse_values = None

        return Vector(
            id=vector_dict["id"],
            values=vector_dict["values"],
            metadata=vector_dict.get("metadata"),
            sparse_values=parsed_sparse_values,
        )
class SearchQueryTypedDict(TypedDict):
    """
    SearchQuery represents the query when searching within a specific namespace.
    """

    inputs: Dict[str, Any]
    """
    The input data to search with.
    Required.
    """

    top_k: int
    """
    The number of results to return with each search.
    Required.
    """

    filter: Optional[Dict[str, Any]]
    """
    The filter to apply to the search.
    Optional.
    """

    # Fix: Optional[Union[SearchQueryVectorTypedDict]] — a Union with a single
    # member is a no-op wrapper, so the annotation is simplified.
    vector: Optional[SearchQueryVectorTypedDict]
    """
    The vector values to search with. If provided, it overwrites the inputs.
    """

    id: Optional[str]
    """
    The unique ID of the vector to be used as a query vector.
    """
class SearchRerankTypedDict(TypedDict):
    """
    SearchRerank represents a rerank request when searching within a specific namespace.
    """

    # Fix: the documentation below was entirely commented out and interleaved
    # with duplicate commented-out field declarations (dead code). Restored as
    # real attribute docstrings, matching the sibling typed dicts in this package.

    model: Union[str, RerankModel]
    """
    The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use.
    Required.
    """

    rank_fields: list[str]
    """
    The fields to use for reranking.
    Required.
    """

    top_n: Optional[int]
    """
    The number of top results to return after reranking. Defaults to top_k.
    Optional.
    """

    parameters: Optional[Dict[str, Any]]
    """
    Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#models)
    for available model parameters.
    Optional.
    """

    query: Optional[str]
    """
    The query to rerank documents against. If a specific rerank query is specified, it overwrites
    the query input that was provided at the top level.
    Optional.
    """
def check_for_deprecated_plugins():
    """Raise ``DeprecatedPluginError`` if a superseded plugin package is installed.

    The inference and records plugin features were folded into the main
    ``pinecone`` package; the presence of either plugin module indicates a
    stale dependency set.
    """
    import importlib

    deprecated = {
        "pinecone_plugins.inference": "pinecone-plugin-inference",
        "pinecone_plugins.records": "pinecone-plugin-records",
    }
    for module_name, package_name in deprecated.items():
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            # Plugin not installed — nothing to flag.
            continue
        if getattr(module, "__installables__", None) is not None:
            raise DeprecatedPluginError(package_name)
-------------------------------------------------------------------------------- 1 | """ 2 | Connecting to Pinecone with GRPC 3 | 4 | The `pinecone.grpc` submodule provides an alternative version of the Pinecone 5 | client that uses gRPC instead of HTTP for data operations. This provides a 6 | significant performance boost for data operations. 7 | 8 | ### Installing the gRPC client 9 | 10 | You must install extra dependencies in order to install the GRPC client. 11 | 12 | #### Installing with pip 13 | 14 | ```bash 15 | # Install the latest version 16 | pip3 install pinecone[grpc] 17 | 18 | # Install a specific version 19 | pip3 install "pinecone[grpc]"==7.0.2 20 | ``` 21 | 22 | #### Installing with poetry 23 | 24 | ```bash 25 | # Install the latest version 26 | poetry add pinecone --extras grpc 27 | 28 | # Install a specific version 29 | poetry add pinecone==7.0.2 --extras grpc 30 | ``` 31 | 32 | ### Using the gRPC client 33 | 34 | ```python 35 | import os 36 | from pinecone.grpc import PineconeGRPC 37 | 38 | client = PineconeGRPC(api_key=os.environ.get("PINECONE_API_KEY")) 39 | 40 | # From this point on, usage is identical to the HTTP client. 
class GRPCClientConfig(NamedTuple):
    """
    GRPC client configuration options.

    :param secure: Whether to use encrypted protocol (SSL). defaults to True.
    :type secure: bool, optional
    :param timeout: defaults to 20 seconds. Fail if gateway doesn't receive response within timeout.
    :type timeout: int, optional
    :param conn_timeout: defaults to 1. Timeout to retry connection if gRPC is unavailable. 0 is no retry.
    :type conn_timeout: int, optional
    :param reuse_channel: Whether to reuse the same grpc channel for multiple requests
    :type reuse_channel: bool, optional
    :param retry_config: RetryConfig indicating how requests should be retried
    :type retry_config: RetryConfig, optional
    :param grpc_channel_options: A dict of gRPC channel arguments
    :type grpc_channel_options: Dict[str, str]
    :param additional_metadata: Additional metadata to be sent to the server with each request. Note that this
        metadata refers to [gRPC metadata](https://grpc.io/docs/guides/metadata/) which is a concept similar
        to HTTP headers. This is unrelated to the metadata that can be stored with a vector in the index.
    :type additional_metadata: Dict[str, str]
    """

    secure: bool = True
    timeout: int = 20
    conn_timeout: int = 1
    reuse_channel: bool = True
    retry_config: Optional[RetryConfig] = None
    grpc_channel_options: Optional[Dict[str, str]] = None
    additional_metadata: Optional[Dict[str, str]] = None

    @classmethod
    def _from_dict(cls, kwargs: dict):
        # Keep only keys that are actual GRPCClientConfig fields so unknown
        # kwargs are silently ignored instead of raising TypeError.
        cls_kwargs = {kk: vv for kk, vv in kwargs.items() if kk in cls._fields}
        return cls(**cls_kwargs)
class EmbeddingsList:
    """
    A list of embeddings.

    Wraps the OpenAPI ``EmbeddingsList`` and exposes list-style access to its
    ``data`` entries; anything else is forwarded to the wrapped object.
    """

    def __init__(self, embeddings_list: OpenAPIEmbeddingsList):
        self.embeddings_list = embeddings_list
        """ :meta private: """

        self.current = 0
        """ :meta private: """

    def _data(self):
        # The embeddings themselves live under the "data" key.
        return self.embeddings_list.get("data")

    def __getitem__(self, index):
        return self._data()[index]

    def __len__(self):
        return len(self._data())

    def __iter__(self):
        return iter(self._data())

    def __str__(self):
        return str(self.embeddings_list)

    def __repr__(self):
        return repr(self.embeddings_list)

    def __getattr__(self, attr):
        # Fallback: forward unknown attributes to the wrapped object.
        return getattr(self.embeddings_list, attr)
class ModelInfo:
    """Wrapper over the OpenAPI ``ModelInfo`` that flattens
    ``supported_metrics`` into a plain list of metric-name strings."""

    def __init__(self, model_info: OpenAPIModelInfo):
        self._model_info = model_info
        # Flatten the nested enum container into plain metric names.
        metrics_container = self._model_info.supported_metrics
        if metrics_container is None:
            self.supported_metrics = []
        else:
            self.supported_metrics = [metric.value for metric in metrics_container.value]

    def to_dict(self):
        raw = self._model_info.to_dict()
        # Replace the nested representation with the flattened one.
        raw["supported_metrics"] = self.supported_metrics
        return raw

    def __getattr__(self, attr):
        # "supported_metrics" is set in __init__, so this branch is normally
        # unreachable; kept defensively. Everything else forwards to the
        # wrapped OpenAPI model.
        if attr == "supported_metrics":
            return self.supported_metrics
        return getattr(self._model_info, attr)

    def __getitem__(self, key):
        return self.__getattr__(key)

    def __str__(self):
        return str(self._model_info)

    def __repr__(self):
        return json.dumps(self.to_dict(), indent=4, default=custom_serializer)
class ModelInfoList:
    """A list of model information."""

    def __init__(self, model_info_list: OpenAPIModelInfoList):
        self._model_info_list = model_info_list
        self._models = [ModelInfo(m) for m in model_info_list.models]

    def names(self) -> List[str]:
        """Return the name of every model in the list."""
        return [model.name for model in self._models]

    def __getitem__(self, key):
        if isinstance(key, int):
            return self._models[key]
        if key == "models":
            # Return the wrapped models rather than the raw OpenAPI objects.
            return self._models
        # Any other key (added in future API versions) falls through.
        return self._model_info_list[key]

    def __getattr__(self, attr):
        if attr == "models":
            return self._models
        # Any other attribute falls through to the wrapped OpenAPI object.
        return getattr(self._model_info_list, attr)

    def __len__(self):
        return len(self._models)

    def __iter__(self):
        return iter(self._models)

    def __str__(self):
        return str(self._models)

    def __repr__(self):
        raw = self._model_info_list.to_dict()
        raw["models"] = [model.to_dict() for model in self._models]
        # Drop keys whose value is None so the JSON output stays compact.
        raw = {key: value for key, value in raw.items() if value is not None}
        return json.dumps(raw, indent=4, default=custom_serializer)
class ModelAsyncio:
    """Asyncio client for the model catalog of the Inference API."""

    def __init__(self, inference_api: "AsyncioInferenceApi") -> None:
        self.__inference_api = inference_api
        """ :meta private: """

        super().__init__()

    @require_kwargs
    async def list(
        self, *, type: Optional[str] = None, vector_type: Optional[str] = None
    ) -> ModelInfoList:
        """
        List all available models.

        :param type: The type of model to list. Either "embed" or "rerank".
        :type type: str, optional

        :param vector_type: The type of vector to list. Either "dense" or "sparse".
        :type vector_type: str, optional

        :return: A list of models.
        """
        # Only forward filters that were actually provided.
        filters = parse_non_empty_args([("type", type), ("vector_type", vector_type)])
        raw_list = await self.__inference_api.list_models(**filters)
        return ModelInfoList(raw_list)

    @require_kwargs
    async def get(self, model_name: str) -> ModelInfo:
        """
        Get a specific model by name.

        :param model_name: The name of the model to get.
        :type model_name: str, required

        :return: A model.
        """
        raw_info = await self.__inference_api.get_model(model_name=model_name)
        return ModelInfo(raw_info)
class AuthUtil:
    @staticmethod
    def update_params_for_auth(configuration, endpoint_auth_settings, headers, querys):
        """Apply the endpoint's authentication settings to an outgoing request.

        :param configuration: Client configuration exposing ``auth_settings()``.
        :param endpoint_auth_settings: Authentication setting identifiers for this endpoint.
        :param headers: Header parameter dict, updated in place.
        :param querys: Query parameter tuple list, updated in place.
        :raises PineconeApiValueError: if a setting targets neither header nor query.
        """
        # Nothing to do when the endpoint declares no auth requirements.
        for auth_name in endpoint_auth_settings or ():
            setting = configuration.auth_settings().get(auth_name)
            if not setting:
                continue
            location = setting["in"]
            if location == "header":
                # http-signature auth is handled elsewhere; skip it here.
                if setting["type"] != "http-signature":
                    headers[setting["key"]] = setting["value"]
            elif location == "query":
                querys.append((setting["key"], setting["value"]))
            else:
                raise PineconeApiValueError(
                    "Authentication token must be in `query` or `header`"
                )
8 | if hasattr(owner, self.attr_name): 9 | return getattr(owner, self.attr_name) 10 | value = self.func(owner) 11 | setattr(owner, self.attr_name, value) 12 | return value 13 | -------------------------------------------------------------------------------- /pinecone/openapi_support/configuration.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/pinecone/openapi_support/configuration.py -------------------------------------------------------------------------------- /pinecone/openapi_support/configuration_lazy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lazy import for the Configuration class to avoid loading the entire openapi_support package. 3 | """ 4 | 5 | from ..config.openapi_configuration import Configuration 6 | 7 | __all__ = ["Configuration"] 8 | -------------------------------------------------------------------------------- /pinecone/openapi_support/constants.py: -------------------------------------------------------------------------------- 1 | OPENAPI_ENDPOINT_PARAMS = ( 2 | "_return_http_data_only", 3 | "_preload_content", 4 | "_request_timeout", 5 | "_check_input_type", 6 | "_check_return_type", 7 | "async_req", 8 | "async_threadpool_executor", 9 | ) 10 | -------------------------------------------------------------------------------- /pinecone/openapi_support/exceptions.py: -------------------------------------------------------------------------------- 1 | from pinecone.exceptions import * # noqa: F403 2 | -------------------------------------------------------------------------------- /pinecone/openapi_support/retry_aiohttp.py: -------------------------------------------------------------------------------- 1 | import random 2 | from typing import Optional 3 | from aiohttp_retry import RetryOptionsBase, EvaluateResponseCallbackType, ClientResponse 4 | 
class JitterRetry(RetryOptionsBase):
    """Retry options with exponential backoff plus random jitter.

    Works around https://github.com/inyutin/aiohttp_retry/issues/44.
    """

    def __init__(
        self,
        attempts: int = 3,  # How many times we should retry
        start_timeout: float = 0.1,  # Base timeout time, then it exponentially grow
        max_timeout: float = 5.0,  # Max possible timeout between tries
        statuses: Optional[set[int]] = None,  # On which statuses we should retry
        exceptions: Optional[set[type[Exception]]] = None,  # On which exceptions we should retry
        methods: Optional[set[str]] = None,  # On which HTTP methods we should retry
        retry_all_server_errors: bool = True,
        evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
    ) -> None:
        super().__init__(
            attempts=attempts,
            statuses=statuses,
            exceptions=exceptions,
            methods=methods,
            retry_all_server_errors=retry_all_server_errors,
            evaluate_response_callback=evaluate_response_callback,
        )

        self._start_timeout: float = start_timeout
        self._max_timeout: float = max_timeout

    def get_timeout(
        self,
        attempt: int,
        response: Optional[ClientResponse] = None,  # noqa: ARG002
    ) -> float:
        """Return the timeout for *attempt*: exponential backoff plus random
        jitter in [0, 0.1], capped at ``max_timeout``.

        (Bug fix: this docstring previously sat below the logging call, which
        made it a dead string literal instead of the method docstring.)
        """
        logger.debug(f"JitterRetry get_timeout: attempt={attempt}, response={response}")
        jitter = random.uniform(0, 0.1)
        timeout = self._start_timeout * (2 ** (attempt - 1))
        return min(timeout + jitter, self._max_timeout)
11 | 12 | The Retry class is being extended as built-in support for jitter was added only in urllib3 2.0.0. 13 | Jitter logic is following the official implementation with a constant jitter factor: https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py 14 | """ 15 | 16 | def get_backoff_time(self) -> float: 17 | backoff_value = super().get_backoff_time() 18 | jitter = random.random() * 0.25 19 | backoff_value += jitter 20 | logger.debug(f"Calculating retry backoff: {backoff_value} (jitter: {jitter})") 21 | return backoff_value 22 | -------------------------------------------------------------------------------- /pinecone/openapi_support/types.py: -------------------------------------------------------------------------------- 1 | from typing import TypedDict, Dict, Union 2 | 3 | 4 | class PropertyValidationTypedDict(TypedDict, total=False): 5 | max_length: int 6 | min_length: int 7 | max_items: int 8 | min_items: int 9 | exclusive_maximum: Union[int, float] 10 | inclusive_maximum: Union[int, float] 11 | exclusive_minimum: Union[int, float] 12 | inclusive_minimum: Union[int, float] 13 | regex: Dict[str, str] 14 | multiple_of: int 15 | -------------------------------------------------------------------------------- /pinecone/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/pinecone/py.typed -------------------------------------------------------------------------------- /pinecone/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .check_kwargs import check_kwargs 2 | from .version import __version__ 3 | from .user_agent import get_user_agent 4 | from .deprecation_notice import warn_deprecated 5 | from .fix_tuple_length import fix_tuple_length 6 | from .convert_to_list import convert_to_list 7 | from .convert_enum_to_string import 
def check_kwargs(caller, given):
    """Log (without raising) any keyword argument names that *caller* does not accept.

    :param caller: The callable whose signature is inspected.
    :param given: Iterable of keyword argument names that were supplied.
    """
    accepted = inspect.getfullargspec(caller).args
    unexpected = set(given).difference(accepted)
    if not unexpected:
        return
    # Deliberately log instead of raising so unexpected kwargs are non-fatal.
    logging.exception(
        caller.__name__ + " had unexpected keyword argument(s): " + ", ".join(unexpected),
        exc_info=False,
    )
def convert_enum_to_string(value: Union[Enum, str]) -> str:
    """Return an Enum member's ``value`` as a string; pass plain strings through."""
    if not isinstance(value, Enum):
        return value
    return str(value.value)
def warn_deprecated(description: str, deprecated_in: str, removal_in: str):
    """Emit a FutureWarning announcing a deprecation and its planned removal version.

    :param description: Human-readable guidance shown to the caller.
    :param deprecated_in: Version in which the feature was deprecated.
    :param removal_in: Version in which the feature will be removed.
    """
    warnings.warn(
        f"DEPRECATED since v{deprecated_in} [Will be removed in v{removal_in}]: {description}",
        FutureWarning,
    )
def filter_dict(d: Dict, allowed_keys: Tuple[str, ...]) -> Dict:
    """Return a copy of *d* keeping only the entries whose key is in *allowed_keys*."""
    allowed = set(allowed_keys)
    return {key: value for key, value in d.items() if key in allowed}
def normalize_host(host: Optional[str]) -> str:
    """Return *host* with an ``https://`` scheme prepended when no scheme is present.

    ``None`` maps to the empty string; existing http/https schemes are kept as-is.
    """
    if host is None:
        return ""
    if host.startswith(("https://", "http://")):
        return host
    return "https://" + host
def require_kwargs(func):
    """Decorator that rejects positional arguments (beyond ``self``) with a TypeError
    that spells out the expected keyword call form."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # args[0] is self; anything beyond that was passed positionally.
        if len(args) > 1:
            names = list(inspect.signature(func).parameters.keys())[1:]  # skip self
            hint = ", ".join(f"{name}=value" for name in names)
            raise TypeError(
                f"{func.__name__}() requires keyword arguments. "
                f"Please use {func.__name__}({hint})"
            )
        return func(*args, **kwargs)

    return wrapper
class tqdm:  # type: ignore
    """Minimal no-op stand-in for ``tqdm`` used when the package is unavailable.

    Supports the same constructor arguments, iteration, ``update``, and use as
    a context manager, but tracks no progress and renders nothing.
    """

    def __init__(self, iterable=None, total=None, desc="", **kwargs):
        self.iterable = iterable
        self.total = total
        self.desc = desc
        # Extra kwargs are accepted for API compatibility and ignored.

    def __iter__(self):
        # Delegate straight to the wrapped iterable.
        yield from self.iterable

    def update(self, n=1):
        # No-op: this stub does not track progress.
        pass

    def __enter__(self):
        # Allow use as a context manager.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Nothing to clean up.
        pass
def get_version():
    """Read the SDK version string from the package-level ``__version__`` file."""
    version_file = Path(__file__).parent.parent / "__version__"
    return version_file.read_text().strip()


__version__ = get_version()
""" The version of the `pinecone` package"""
@pytest.fixture
def index_name():
    """Fixture returning the target index name from the INDEX_NAME env var.

    :raises RuntimeError: if INDEX_NAME is not set.
    """
    name = os.environ.get("INDEX_NAME", None)
    if name is None:
        # BUG FIX: the original did `raise "..."` — raising a plain string is a
        # TypeError in Python 3. Raise a real exception with the message instead.
        raise RuntimeError("INDEX_NAME environment variable is not set")
    return name
description = idx.describe_index_stats() 43 | logger.info("Index stats: %s", description) 44 | assert description.dimension == 2 45 | assert description.total_vector_count >= 3 46 | 47 | # Query for results 48 | query_results = idx.query(id="1", top_k=10, include_values=True) 49 | logger.info("Query results: %s", query_results) 50 | assert query_results.matches[0].id == "1" 51 | assert len(query_results.matches) == 3 52 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | import dotenv 2 | 3 | dotenv.load_dotenv() 4 | -------------------------------------------------------------------------------- /tests/integration/conftest.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from pinecone import Pinecone 3 | from datetime import datetime, timedelta 4 | 5 | logger = logging.getLogger(__name__) 6 | 7 | 8 | def pytest_sessionfinish(session, exitstatus): 9 | """ 10 | Hook that runs after all tests have completed. 11 | This is a good place to clean up any resources that were created during the test session. 
12 | """ 13 | logger.info("Running final cleanup after all tests...") 14 | 15 | try: 16 | # Initialize Pinecone client 17 | pc = Pinecone() 18 | 19 | # Get all indexes 20 | indexes = pc.list_indexes() 21 | 22 | # Find test indexes (those created during this test run) 23 | test_indexes = [idx for idx in indexes.names() if idx.startswith("test-")] 24 | 25 | # Delete test indexes that are older than 1 hour (in case of failed cleanup) 26 | for index_name in test_indexes: 27 | try: 28 | description = pc.describe_index(name=index_name) 29 | created_at = datetime.fromisoformat(description.created_at.replace("Z", "+00:00")) 30 | 31 | if datetime.now(created_at.tzinfo) - created_at > timedelta(hours=1): 32 | logger.info(f"Cleaning up old test index: {index_name}") 33 | pc.delete_index(name=index_name, timeout=-1) 34 | except Exception as e: 35 | logger.warning(f"Failed to clean up index {index_name}: {str(e)}") 36 | 37 | except Exception as e: 38 | logger.error(f"Error during final cleanup: {str(e)}") 39 | 40 | logger.info("Final cleanup completed") 41 | -------------------------------------------------------------------------------- /tests/integration/control/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/pod/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/pod/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/pod/test_configure_pod_index.py: -------------------------------------------------------------------------------- 1 | 
import time 2 | 3 | 4 | class TestConfigurePodIndex: 5 | def test_configure_pod_index(self, client, ready_index): 6 | time.sleep(10) # Wait a little more, just in case. 7 | client.configure_index(ready_index, replicas=1, pod_type="p1.x1") 8 | -------------------------------------------------------------------------------- /tests/integration/control/pod/test_create_index.py: -------------------------------------------------------------------------------- 1 | class TestCreateIndexPods: 2 | def test_create_with_optional_tags(self, client, create_index_params): 3 | index_name = create_index_params["name"] 4 | tags = {"foo": "FOO", "bar": "BAR"} 5 | create_index_params["tags"] = tags 6 | 7 | client.create_index(**create_index_params) 8 | 9 | desc = client.describe_index(name=index_name) 10 | assert desc.tags.to_dict() == tags 11 | -------------------------------------------------------------------------------- /tests/integration/control/resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/resources/backup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/backup/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/resources/collections/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/collections/__init__.py 
-------------------------------------------------------------------------------- /tests/integration/control/resources/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import uuid 4 | import logging 5 | import dotenv 6 | from pinecone import Pinecone, PodIndexEnvironment 7 | from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params 8 | 9 | dotenv.load_dotenv() 10 | 11 | logger = logging.getLogger(__name__) 12 | """ :meta private: """ 13 | 14 | # Generate a unique ID for the entire test run 15 | RUN_ID = str(uuid.uuid4()) 16 | 17 | 18 | @pytest.fixture() 19 | def pc(): 20 | return Pinecone() 21 | 22 | 23 | @pytest.fixture() 24 | def create_index_params(request): 25 | return default_create_index_params(request, RUN_ID) 26 | 27 | 28 | @pytest.fixture() 29 | def index_name(create_index_params): 30 | return create_index_params["name"] 31 | 32 | 33 | @pytest.fixture() 34 | def index_tags(create_index_params): 35 | return create_index_params["tags"] 36 | 37 | 38 | @pytest.fixture 39 | def pod_environment(): 40 | return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) 41 | 42 | 43 | @pytest.fixture() 44 | def ready_sl_index(pc, index_name, create_index_params): 45 | create_index_params["timeout"] = None 46 | pc.create_index(**create_index_params) 47 | yield index_name 48 | pc.db.index.delete(name=index_name, timeout=-1) 49 | 50 | 51 | @pytest.fixture() 52 | def notready_sl_index(pc, index_name, create_index_params): 53 | pc.create_index(**create_index_params, timeout=-1) 54 | yield index_name 55 | 56 | 57 | def pytest_sessionfinish(session, exitstatus): 58 | """ 59 | Hook that runs after all tests have completed. 60 | This is a good place to clean up any resources that were created during the test session. 
61 | """ 62 | logger.info("Running final cleanup after all tests...") 63 | 64 | pc = Pinecone() 65 | delete_indexes_from_run(pc, RUN_ID) 66 | delete_backups_from_run(pc, RUN_ID) 67 | -------------------------------------------------------------------------------- /tests/integration/control/resources/index/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/index/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/resources/index/test_delete.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/index/test_delete.py -------------------------------------------------------------------------------- /tests/integration/control/resources/index/test_has.py: -------------------------------------------------------------------------------- 1 | from ....helpers import random_string 2 | 3 | 4 | class TestHasIndex: 5 | def test_index_exists_success(self, pc, create_index_params): 6 | name = create_index_params["name"] 7 | pc.db.index.create(**create_index_params) 8 | has_index = pc.db.index.has(name=name) 9 | assert has_index == True 10 | 11 | def test_index_does_not_exist(self, pc): 12 | name = random_string(8) 13 | has_index = pc.db.index.has(name=name) 14 | assert has_index == False 15 | 16 | def test_has_index_with_null_index_name(self, pc): 17 | has_index = pc.db.index.has(name="") 18 | assert has_index == False 19 | -------------------------------------------------------------------------------- /tests/integration/control/resources/index/test_list.py: -------------------------------------------------------------------------------- 1 | from pinecone 
import IndexModel 2 | 3 | 4 | class TestListIndexes: 5 | def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_index_params): 6 | list_response = pc.db.index.list() 7 | assert len(list_response.indexes) != 0 8 | assert isinstance(list_response.indexes[0], IndexModel) 9 | 10 | created_index = [index for index in list_response.indexes if index.name == ready_sl_index][ 11 | 0 12 | ] 13 | assert created_index.name == ready_sl_index 14 | assert created_index.dimension == create_index_params["dimension"] 15 | assert created_index.metric == create_index_params["metric"] 16 | assert ready_sl_index in created_index.host 17 | 18 | def test_list_indexes_includes_not_ready_indexes(self, pc, notready_sl_index): 19 | list_response = pc.db.index.list() 20 | assert len(list_response.indexes) != 0 21 | assert isinstance(list_response.indexes[0], IndexModel) 22 | 23 | created_index = [ 24 | index for index in list_response.indexes if index.name == notready_sl_index 25 | ][0] 26 | assert created_index.name == notready_sl_index 27 | assert notready_sl_index in created_index.name 28 | -------------------------------------------------------------------------------- /tests/integration/control/resources/restore_job/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/resources/restore_job/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/resources/restore_job/test_describe.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import Pinecone, PineconeApiException 3 | import logging 4 | from datetime import datetime 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | 9 | class TestRestoreJobDescribe: 10 | def test_describe_restore_job(self, pc: Pinecone): 
11 | jobs = pc.db.restore_job.list() 12 | assert len(jobs.data) >= 1 13 | 14 | restore_job_id = jobs.data[0].restore_job_id 15 | restore_job = pc.db.restore_job.describe(job_id=restore_job_id) 16 | logger.debug(f"Restore job: {restore_job}") 17 | 18 | assert restore_job.restore_job_id == restore_job_id 19 | assert restore_job.backup_id is not None 20 | assert isinstance(restore_job.status, str) 21 | assert isinstance(restore_job.backup_id, str) 22 | if restore_job.status == "Completed": 23 | assert isinstance(restore_job.completed_at, datetime) 24 | assert isinstance(restore_job.created_at, datetime) 25 | if restore_job.status != "Pending": 26 | assert isinstance(restore_job.percent_complete, float) 27 | assert isinstance(restore_job.target_index_id, str) 28 | assert isinstance(restore_job.target_index_name, str) 29 | 30 | def test_describe_restore_job_legacy_syntax(self, pc: Pinecone): 31 | jobs = pc.list_restore_jobs() 32 | assert len(jobs.data) >= 1 33 | 34 | restore_job_id = jobs.data[0].restore_job_id 35 | restore_job = pc.describe_restore_job(job_id=restore_job_id) 36 | logger.debug(f"Restore job: {restore_job}") 37 | 38 | def test_describe_restore_job_with_invalid_job_id(self, pc: Pinecone): 39 | with pytest.raises(PineconeApiException): 40 | pc.db.restore_job.describe(job_id="invalid") 41 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control/serverless/__init__.py -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_configure_index_deletion_protection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import DeletionProtection 
3 | 4 | 5 | class TestDeletionProtection: 6 | @pytest.mark.parametrize( 7 | "dp_enabled,dp_disabled", 8 | [("enabled", "disabled"), (DeletionProtection.ENABLED, DeletionProtection.DISABLED)], 9 | ) 10 | def test_deletion_protection(self, client, create_sl_index_params, dp_enabled, dp_disabled): 11 | name = create_sl_index_params["name"] 12 | client.create_index(**create_sl_index_params, deletion_protection=dp_enabled) 13 | desc = client.describe_index(name) 14 | assert desc.deletion_protection == "enabled" 15 | 16 | with pytest.raises(Exception) as e: 17 | client.delete_index(name) 18 | assert "Deletion protection is enabled for this index" in str(e.value) 19 | 20 | client.configure_index(name, deletion_protection=dp_disabled) 21 | desc = client.describe_index(name) 22 | assert desc.deletion_protection == "disabled" 23 | 24 | client.delete_index(name) 25 | 26 | @pytest.mark.parametrize("deletion_protection", ["invalid"]) 27 | def test_deletion_protection_invalid_options( 28 | self, client, create_sl_index_params, deletion_protection 29 | ): 30 | with pytest.raises(Exception) as e: 31 | client.create_index(**create_sl_index_params, deletion_protection=deletion_protection) 32 | assert "deletion_protection must be either 'enabled' or 'disabled'" in str(e.value) 33 | 34 | @pytest.mark.parametrize("deletion_protection", ["invalid"]) 35 | def test_configure_deletion_protection_invalid_options( 36 | self, client, create_sl_index_params, deletion_protection 37 | ): 38 | with pytest.raises(Exception) as e: 39 | client.create_index(**create_sl_index_params, deletion_protection=deletion_protection) 40 | assert "deletion_protection must be either 'enabled' or 'disabled'" in str(e.value) 41 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_configure_index_tags.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class TestIndexTags: 5 | def 
test_index_tags_none_by_default(self, client, ready_sl_index): 6 | client.describe_index(name=ready_sl_index) 7 | assert client.describe_index(name=ready_sl_index).tags is None 8 | 9 | def test_add_index_tags(self, client, ready_sl_index): 10 | client.configure_index(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) 11 | assert client.describe_index(name=ready_sl_index).tags.to_dict() == { 12 | "foo": "FOO", 13 | "bar": "BAR", 14 | } 15 | 16 | def test_remove_tags_by_setting_empty_value_for_key(self, client, ready_sl_index): 17 | client.configure_index(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) 18 | client.configure_index(name=ready_sl_index, tags={}) 19 | assert client.describe_index(name=ready_sl_index).tags.to_dict() == { 20 | "foo": "FOO", 21 | "bar": "BAR", 22 | } 23 | 24 | client.configure_index(name=ready_sl_index, tags={"foo": ""}) 25 | assert client.describe_index(name=ready_sl_index).tags.to_dict() == {"bar": "BAR"} 26 | 27 | def test_merge_new_tags_with_existing_tags(self, client, ready_sl_index): 28 | client.configure_index(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) 29 | client.configure_index(name=ready_sl_index, tags={"baz": "BAZ"}) 30 | assert client.describe_index(name=ready_sl_index).tags.to_dict() == { 31 | "foo": "FOO", 32 | "bar": "BAR", 33 | "baz": "BAZ", 34 | } 35 | 36 | @pytest.mark.skip(reason="Backend bug filed") 37 | def test_remove_all_tags(self, client, ready_sl_index): 38 | client.configure_index(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) 39 | client.configure_index(name=ready_sl_index, tags={"foo": "", "bar": ""}) 40 | assert client.describe_index(name=ready_sl_index).tags is None 41 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_create_index_api_errors.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import PineconeApiException, 
PineconeApiValueError 3 | 4 | 5 | class TestCreateIndexApiErrorCases: 6 | def test_create_index_with_invalid_name(self, client, create_sl_index_params): 7 | create_sl_index_params["name"] = "Invalid-name" 8 | with pytest.raises(PineconeApiException): 9 | client.create_index(**create_sl_index_params) 10 | 11 | def test_create_index_invalid_metric(self, client, create_sl_index_params): 12 | create_sl_index_params["metric"] = "invalid" 13 | with pytest.raises(PineconeApiValueError): 14 | client.create_index(**create_sl_index_params) 15 | 16 | def test_create_index_with_invalid_neg_dimension(self, client, create_sl_index_params): 17 | create_sl_index_params["dimension"] = -1 18 | with pytest.raises(PineconeApiValueError): 19 | client.create_index(**create_sl_index_params) 20 | 21 | def test_create_index_that_already_exists(self, client, create_sl_index_params): 22 | client.create_index(**create_sl_index_params) 23 | with pytest.raises(PineconeApiException): 24 | client.create_index(**create_sl_index_params) 25 | 26 | @pytest.mark.skip(reason="Bug filed https://app.asana.com/0/1205078872348810/1205917627868143") 27 | def test_create_index_w_incompatible_options(self, client, create_sl_index_params): 28 | create_sl_index_params["pod_type"] = "p1.x2" 29 | create_sl_index_params["environment"] = "us-east1-gcp" 30 | create_sl_index_params["replicas"] = 2 31 | with pytest.raises(PineconeApiException): 32 | client.create_index(**create_sl_index_params) 33 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_create_index_timeouts.py: -------------------------------------------------------------------------------- 1 | class TestCreateIndexWithTimeout: 2 | def test_create_index_default_timeout(self, client, create_sl_index_params): 3 | create_sl_index_params["timeout"] = None 4 | client.create_index(**create_sl_index_params) 5 | # Waits infinitely for index to be ready 6 | desc = 
client.describe_index(create_sl_index_params["name"]) 7 | assert desc.status.ready == True 8 | 9 | def test_create_index_when_timeout_set(self, client, create_sl_index_params): 10 | create_sl_index_params["timeout"] = ( 11 | 1000 # effectively infinite, but different code path from None 12 | ) 13 | client.create_index(**create_sl_index_params) 14 | desc = client.describe_index(create_sl_index_params["name"]) 15 | assert desc.status.ready == True 16 | 17 | def test_create_index_with_negative_timeout(self, client, create_sl_index_params): 18 | create_sl_index_params["timeout"] = -1 19 | client.create_index(**create_sl_index_params) 20 | desc = client.describe_index(create_sl_index_params["name"]) 21 | # Returns immediately without waiting for index to be ready 22 | assert desc.status.ready in [False, True] 23 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_create_index_type_errors.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import PineconeApiException, PineconeApiTypeError 3 | 4 | 5 | class TestCreateIndexTypeErrorCases: 6 | def test_create_index_with_invalid_str_dimension(self, client, create_sl_index_params): 7 | create_sl_index_params["dimension"] = "10" 8 | with pytest.raises(PineconeApiTypeError): 9 | client.create_index(**create_sl_index_params) 10 | 11 | def test_create_index_with_missing_dimension(self, client, create_sl_index_params): 12 | del create_sl_index_params["dimension"] 13 | with pytest.raises(PineconeApiException): 14 | client.create_index(**create_sl_index_params) 15 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_has_index.py: -------------------------------------------------------------------------------- 1 | from tests.integration.helpers import random_string 2 | 3 | 4 | class TestHasIndex: 5 | def 
test_index_exists_success(self, client, create_sl_index_params): 6 | name = create_sl_index_params["name"] 7 | client.create_index(**create_sl_index_params) 8 | has_index = client.has_index(name) 9 | assert has_index == True 10 | 11 | def test_index_does_not_exist(self, client): 12 | name = random_string(8) 13 | has_index = client.has_index(name) 14 | assert has_index == False 15 | 16 | def test_has_index_with_null_index_name(self, client): 17 | has_index = client.has_index("") 18 | assert has_index == False 19 | -------------------------------------------------------------------------------- /tests/integration/control/serverless/test_list_indexes.py: -------------------------------------------------------------------------------- 1 | from pinecone import IndexModel 2 | 3 | 4 | class TestListIndexes: 5 | def test_list_indexes_includes_ready_indexes( 6 | self, client, ready_sl_index, create_sl_index_params 7 | ): 8 | list_response = client.list_indexes() 9 | assert len(list_response.indexes) != 0 10 | assert isinstance(list_response.indexes[0], IndexModel) 11 | 12 | created_index = [index for index in list_response.indexes if index.name == ready_sl_index][ 13 | 0 14 | ] 15 | assert created_index.name == ready_sl_index 16 | assert created_index.dimension == create_sl_index_params["dimension"] 17 | assert created_index.metric == create_sl_index_params["metric"] 18 | assert ready_sl_index in created_index.host 19 | 20 | def test_list_indexes_includes_not_ready_indexes(self, client, notready_sl_index): 21 | list_response = client.list_indexes() 22 | assert len(list_response.indexes) != 0 23 | assert isinstance(list_response.indexes[0], IndexModel) 24 | 25 | created_index = [ 26 | index for index in list_response.indexes if index.name == notready_sl_index 27 | ][0] 28 | assert created_index.name == notready_sl_index 29 | assert notready_sl_index in created_index.name 30 | -------------------------------------------------------------------------------- 
/tests/integration/control_asyncio/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control_asyncio/__init__.py -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control_asyncio/resources/__init__.py -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/backup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control_asyncio/resources/backup/__init__.py -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import uuid 3 | import logging 4 | import dotenv 5 | import os 6 | from pinecone import Pinecone, PodIndexEnvironment 7 | from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params 8 | 9 | dotenv.load_dotenv() 10 | 11 | logger = logging.getLogger(__name__) 12 | """ :meta private: """ 13 | 14 | # Generate a unique ID for the entire test run 15 | RUN_ID = str(uuid.uuid4()) 16 | 17 | 18 | @pytest.fixture() 19 | def pc(): 20 | return Pinecone() 21 | 22 | 23 | @pytest.fixture 24 | def pod_environment(): 25 | return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) 26 | 27 | 28 | @pytest.fixture() 29 | def 
create_index_params(request): 30 | return default_create_index_params(request, RUN_ID) 31 | 32 | 33 | @pytest.fixture() 34 | def index_name(create_index_params): 35 | return create_index_params["name"] 36 | 37 | 38 | @pytest.fixture() 39 | def index_tags(create_index_params): 40 | return create_index_params["tags"] 41 | 42 | 43 | @pytest.fixture() 44 | def ready_sl_index(pc, index_name, create_index_params): 45 | create_index_params["timeout"] = None 46 | pc.create_index(**create_index_params) 47 | yield index_name 48 | pc.db.index.delete(name=index_name, timeout=-1) 49 | 50 | 51 | @pytest.fixture() 52 | def notready_sl_index(pc, index_name, create_index_params): 53 | pc.create_index(**create_index_params, timeout=-1) 54 | yield index_name 55 | 56 | 57 | def pytest_sessionfinish(session, exitstatus): 58 | """ 59 | Hook that runs after all tests have completed. 60 | This is a good place to clean up any resources that were created during the test session. 61 | """ 62 | logger.info("Running final cleanup after all tests...") 63 | 64 | pc = Pinecone() 65 | delete_indexes_from_run(pc, RUN_ID) 66 | delete_backups_from_run(pc, RUN_ID) 67 | -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/index/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control_asyncio/resources/index/__init__.py -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/index/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pinecone import CloudProvider, AwsRegion, ServerlessSpec 4 | 5 | 6 | @pytest.fixture() 7 | def spec1(serverless_cloud, serverless_region): 8 | return {"serverless": {"cloud": serverless_cloud, 
"region": serverless_region}} 9 | 10 | 11 | @pytest.fixture() 12 | def spec2(): 13 | return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) 14 | 15 | 16 | @pytest.fixture() 17 | def spec3(): 18 | return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} 19 | -------------------------------------------------------------------------------- /tests/integration/control_asyncio/resources/restore_job/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/control_asyncio/resources/restore_job/__init__.py -------------------------------------------------------------------------------- /tests/integration/control_asyncio/test_create_index_timeouts.py: -------------------------------------------------------------------------------- 1 | from pinecone import PineconeAsyncio 2 | import pytest 3 | 4 | 5 | @pytest.mark.asyncio 6 | class TestCreateIndexWithTimeout: 7 | async def test_create_index_default_timeout(self, create_sl_index_params): 8 | pc = PineconeAsyncio() 9 | 10 | create_sl_index_params["timeout"] = None 11 | await pc.create_index(**create_sl_index_params) 12 | # Waits infinitely for index to be ready 13 | desc = await pc.describe_index(create_sl_index_params["name"]) 14 | assert desc.status.ready == True 15 | await pc.close() 16 | 17 | async def test_create_index_when_timeout_set(self, create_sl_index_params): 18 | pc = PineconeAsyncio() 19 | 20 | create_sl_index_params["timeout"] = ( 21 | 1000 # effectively infinite, but different code path from None 22 | ) 23 | await pc.create_index(**create_sl_index_params) 24 | desc = await pc.describe_index(create_sl_index_params["name"]) 25 | assert desc.status.ready == True 26 | await pc.close() 27 | 28 | async def test_create_index_with_negative_timeout(self, create_sl_index_params): 29 | pc = PineconeAsyncio() 30 | 31 
| create_sl_index_params["timeout"] = -1 32 | await pc.create_index(**create_sl_index_params) 33 | desc = await pc.describe_index(create_sl_index_params["name"]) 34 | # Returns immediately without waiting for index to be ready 35 | assert desc.status.ready in [False, True] 36 | await pc.close() 37 | -------------------------------------------------------------------------------- /tests/integration/control_asyncio/test_create_index_type_errors.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import PineconeApiException, PineconeApiTypeError, PineconeAsyncio 3 | 4 | 5 | @pytest.mark.asyncio 6 | class TestCreateIndexTypeErrorCases: 7 | async def test_create_index_with_invalid_str_dimension(self, create_sl_index_params): 8 | pc = PineconeAsyncio() 9 | 10 | create_sl_index_params["dimension"] = "10" 11 | with pytest.raises(PineconeApiTypeError): 12 | await pc.create_index(**create_sl_index_params) 13 | await pc.close() 14 | 15 | async def test_create_index_with_missing_dimension(self, create_sl_index_params): 16 | pc = PineconeAsyncio() 17 | 18 | del create_sl_index_params["dimension"] 19 | with pytest.raises(PineconeApiException): 20 | await pc.create_index(**create_sl_index_params) 21 | await pc.close() 22 | -------------------------------------------------------------------------------- /tests/integration/control_asyncio/test_has_index.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from tests.integration.helpers import random_string 3 | from pinecone import PineconeAsyncio 4 | 5 | 6 | @pytest.mark.asyncio 7 | class TestHasIndex: 8 | async def test_index_exists_success(self, create_sl_index_params): 9 | pc = PineconeAsyncio() 10 | 11 | name = create_sl_index_params["name"] 12 | await pc.create_index(**create_sl_index_params) 13 | has_index = await pc.has_index(name) 14 | assert has_index == True 15 | await pc.close() 16 | 17 | async def 
test_index_does_not_exist(self): 18 | pc = PineconeAsyncio() 19 | 20 | name = random_string(8) 21 | has_index = await pc.has_index(name) 22 | assert has_index == False 23 | await pc.close() 24 | 25 | async def test_has_index_with_null_index_name(self): 26 | pc = PineconeAsyncio() 27 | 28 | has_index = await pc.has_index("") 29 | assert has_index == False 30 | await pc.close() 31 | -------------------------------------------------------------------------------- /tests/integration/control_asyncio/test_list_indexes.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import IndexModel, PineconeAsyncio 3 | 4 | 5 | @pytest.mark.asyncio 6 | class TestListIndexes: 7 | async def test_list_indexes_includes_ready_indexes( 8 | self, ready_sl_index, create_sl_index_params 9 | ): 10 | pc = PineconeAsyncio() 11 | 12 | list_response = await pc.list_indexes() 13 | assert len(list_response.indexes) != 0 14 | assert isinstance(list_response.indexes[0], IndexModel) 15 | 16 | created_index = [index for index in list_response.indexes if index.name == ready_sl_index][ 17 | 0 18 | ] 19 | assert created_index.name == ready_sl_index 20 | assert created_index.dimension == create_sl_index_params["dimension"] 21 | assert created_index.metric == create_sl_index_params["metric"] 22 | assert ready_sl_index in created_index.host 23 | await pc.close() 24 | 25 | async def test_list_indexes_includes_not_ready_indexes(self, notready_sl_index): 26 | pc = PineconeAsyncio() 27 | 28 | list_response = await pc.list_indexes() 29 | assert len(list_response.indexes) != 0 30 | assert isinstance(list_response.indexes[0], IndexModel) 31 | 32 | created_index = [ 33 | index for index in list_response.indexes if index.name == notready_sl_index 34 | ][0] 35 | assert created_index.name == notready_sl_index 36 | assert notready_sl_index in created_index.name 37 | await pc.close() 38 | 
-------------------------------------------------------------------------------- /tests/integration/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/data/__init__.py -------------------------------------------------------------------------------- /tests/integration/data/test_initialization.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class TestIndexClientInitialization: 5 | def test_index_direct_host_kwarg(self, client, index_host): 6 | index = client.Index(host=index_host) 7 | index.fetch(ids=["1", "2", "3"]) 8 | 9 | def test_index_direct_host_with_https(self, client, index_host): 10 | if not index_host.startswith("https://"): 11 | index_host = "https://" + index_host 12 | index = client.Index(host=index_host) 13 | index.fetch(ids=["1", "2", "3"]) 14 | 15 | def test_index_direct_host_without_https(self, client, index_host): 16 | if index_host.startswith("https://"): 17 | index_host = index_host[8:] 18 | index = client.Index(host=index_host) 19 | index.fetch(ids=["1", "2", "3"]) 20 | 21 | def test_index_by_name_positional_only(self, client, index_name, index_host): 22 | index = client.Index(index_name) 23 | index.fetch(ids=["1", "2", "3"]) 24 | 25 | def test_index_by_name_positional_with_host(self, client, index_name, index_host): 26 | index = client.Index(index_name, index_host) 27 | index.fetch(ids=["1", "2", "3"]) 28 | 29 | def test_index_by_name_kwargs(self, client, index_name): 30 | index = client.Index(name=index_name) 31 | index.fetch(ids=["1", "2", "3"]) 32 | 33 | def test_index_by_name_kwargs_with_host(self, client, index_name, index_host): 34 | index = client.Index(name=index_name, host=index_host) 35 | index.fetch(ids=["1", "2", "3"]) 36 | 37 | def test_raises_when_no_name_or_host(self, client, index_host): 38 
| with pytest.raises(ValueError): 39 | client.Index() 40 | -------------------------------------------------------------------------------- /tests/integration/data/test_upsert_dense.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pinecone import Vector 3 | from ..helpers import poll_stats_for_namespace, embedding_values, random_string 4 | 5 | 6 | @pytest.fixture(scope="session") 7 | def upsert_dense_namespace(): 8 | return random_string(10) 9 | 10 | 11 | class TestUpsertDense: 12 | @pytest.mark.parametrize("use_nondefault_namespace", [True, False]) 13 | def test_upsert_to_namespace(self, idx, upsert_dense_namespace, use_nondefault_namespace): 14 | target_namespace = upsert_dense_namespace if use_nondefault_namespace else "" 15 | 16 | # Upsert with tuples 17 | idx.upsert( 18 | vectors=[ 19 | ("1", embedding_values()), 20 | ("2", embedding_values()), 21 | ("3", embedding_values()), 22 | ], 23 | namespace=target_namespace, 24 | ) 25 | 26 | # Upsert with objects 27 | idx.upsert( 28 | vectors=[ 29 | Vector(id="4", values=embedding_values()), 30 | Vector(id="5", values=embedding_values()), 31 | Vector(id="6", values=embedding_values()), 32 | ], 33 | namespace=target_namespace, 34 | ) 35 | 36 | # Upsert with dict 37 | idx.upsert( 38 | vectors=[ 39 | {"id": "7", "values": embedding_values()}, 40 | {"id": "8", "values": embedding_values()}, 41 | {"id": "9", "values": embedding_values()}, 42 | ], 43 | namespace=target_namespace, 44 | ) 45 | 46 | poll_stats_for_namespace(idx, target_namespace, 9) 47 | 48 | # Check the vector count reflects some data has been upserted 49 | stats = idx.describe_index_stats() 50 | assert stats.total_vector_count >= 9 51 | assert stats.namespaces[target_namespace].vector_count == 9 52 | -------------------------------------------------------------------------------- /tests/integration/data/test_upsert_from_dataframe.py: 
import pytest
import os
from pinecone import Vector, SparseValues
from ..helpers import poll_stats_for_namespace, embedding_values


@pytest.mark.skipif(
    os.getenv("METRIC") != "dotproduct", reason="Only metric=dotproduct indexes support hybrid"
)
class TestUpsertHybrid:
    """Upserts of vectors carrying both dense and sparse values (hybrid search)."""

    @pytest.mark.parametrize("use_nondefault_namespace", [True, False])
    def test_upsert_to_namespace_with_sparse_embedding_values(
        self, idx, namespace, use_nondefault_namespace
    ):
        target_namespace = namespace if use_nondefault_namespace else ""

        # Upsert with a SparseValues object
        idx.upsert(
            vectors=[
                Vector(
                    id="1",
                    values=embedding_values(),
                    sparse_values=SparseValues(indices=[0, 1], values=embedding_values()),
                )
            ],
            namespace=target_namespace,
        )

        # Upsert with plain-dict sparse values
        idx.upsert(
            vectors=[
                {
                    "id": "2",
                    "values": embedding_values(),
                    "sparse_values": {"indices": [0, 1], "values": embedding_values()},
                },
                {
                    "id": "3",
                    "values": embedding_values(),
                    "sparse_values": {"indices": [0, 1], "values": embedding_values()},
                },
            ],
            namespace=target_namespace,
        )

        # Exactly 3 vectors (ids "1".."3") were upserted above. The original
        # polled for 9 — copied from the dense upsert test, which upserts 9
        # vectors — so the poll could never see the expected count.
        poll_stats_for_namespace(idx, target_namespace, 3)

        # Check the vector count reflects the upserted data
        stats = idx.describe_index_stats()
        assert stats.total_vector_count >= 3
        assert stats.namespaces[target_namespace].vector_count == 3
import pytest
from pinecone import Vector
from .conftest import build_asyncioindex_client, poll_for_freshness
from ..helpers import random_string, embedding_values


@pytest.mark.asyncio
@pytest.mark.parametrize("target_namespace", [random_string(20)])
async def test_list(index_host, dimension, target_namespace):
    """Exercise id listing: prefix filtering and page-size limits."""
    asyncio_idx = build_asyncioindex_client(index_host)

    vectors = [
        Vector(id=str(n), values=embedding_values(dimension), metadata={"genre": "action"})
        for n in range(100)
    ]
    await asyncio_idx.upsert(
        vectors=vectors, namespace=target_namespace, batch_size=10, show_progress=False
    )

    await poll_for_freshness(asyncio_idx, target_namespace, 100)

    # With prefix="9" exactly 11 ids match ("9" plus "90".."99"), so a limit
    # of 11 yields a single page containing all of them.
    expected_nines = {"9", "90", "91", "92", "93", "94", "95", "96", "97", "98", "99"}
    async for page in asyncio_idx.list(namespace=target_namespace, limit=11, prefix="9"):
        assert set(page) == expected_nines

    # 100 ids at 4 per page -> 25 pages, none larger than the limit.
    page_count = 0
    async for page in asyncio_idx.list(namespace=target_namespace, limit=4):
        page_count += 1
        assert len(page) <= 4
    assert page_count == 25
    await asyncio_idx.close()
"""Shared helper utilities re-exported for the integration test suite.

Tests import from ``tests.integration.helpers`` rather than the private
``.helpers`` module directly, keeping them decoupled from that module's
internal layout.
"""

from .helpers import (
    fake_api_key,
    get_environment_var,
    random_string,
    generate_index_name,
    generate_collection_name,
    poll_stats_for_namespace,
    poll_fetch_for_ids_in_namespace,
    embedding_values,
    jsonprint,
    index_tags,
    delete_backups_from_run,
    delete_indexes_from_run,
    default_create_index_params,
)

__all__ = [
    "fake_api_key",
    "get_environment_var",
    "random_string",
    "generate_index_name",
    "generate_collection_name",
    "poll_stats_for_namespace",
    "poll_fetch_for_ids_in_namespace",
    "embedding_values",
    "jsonprint",
    "index_tags",
    "delete_backups_from_run",
    "delete_indexes_from_run",
    "default_create_index_params",
]
from pinecone import Pinecone


class TestAssistantPlugin:
    """Smoke test that the assistant plugin is discoverable and callable."""

    def test_assistant_plugin(self):
        client = Pinecone()
        # Reaching the plugin attribute and issuing a call must not raise.
        client.assistant.list_assistants()
        assert True, "This should pass without errors"
dCvvesmF6dWQmOxZdm3pqusTkIWNq2RBb2kEqZA84cfVLX4+OOhbieC9XKQjsOcE 19 | h+rsI/lmeuR9 20 | -----END CERTIFICATE----- 21 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy1/mitmproxy-ca-cert.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/.mitm/proxy1/mitmproxy-ca-cert.p12 -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy1/mitmproxy-ca-cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDNTCCAh2gAwIBAgIUG5Ji5NxWD3Q7h8remh7vYloa1UMwDQYJKoZIhvcNAQEL 3 | BQAwKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAltaXRtcHJveHkwHhcN 4 | MjQwMzE3MDQwNjA2WhcNMzQwMzE3MDQwNjA2WjAoMRIwEAYDVQQDDAltaXRtcHJv 5 | eHkxEjAQBgNVBAoMCW1pdG1wcm94eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 6 | AQoCggEBAI96RxFM2U3cXyhJre0DbJvZDvrr5IEFJhEO9+7vRFM73cTax2jhUDQx 7 | ZLx5LgWWQmqTfNop5ON1XKqYMxpjTJrHEbIcnybLRmLL+SXVsj547vRH1rps+G4m 8 | 3iJWorGju3PieJYj8ppro0mhlynZRHOM8EzkX9TgxdtFpz3hejy9btOwEkRGrjM1 9 | 5prsDubYn0JwGz6N2N/yAf9mviWKnP1xc1CD2xIJwJKX1Tyqi9B93w1YL5JFV7yg 10 | rdlRw4X0a3wav7GiJJkylv8cZrtZ4Kt4TwNMLpqh21LRqJkwyFE8NLXMD/aS4q2U 11 | 3K5ml6H9MthNkrheH0RlsiOe5RQJMAcCAwEAAaNXMFUwDwYDVR0TAQH/BAUwAwEB 12 | /zATBgNVHSUEDDAKBggrBgEFBQcDATAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE 13 | FM83YTNU3L2z9vvQvHrGX0U/XAf2MA0GCSqGSIb3DQEBCwUAA4IBAQARURZnD7Nm 14 | d/kN1gIpl+x9aAaMLlvS3hgn6quuVVJzyiHYZKmBq/76VPIyn4dSFQakvS5nob3R 15 | FNzlq3QR6o4jAR6BIEzuKDKExFdYz7hfBA6JgGUxTsofJPBmqC2BvRZlkt/Qb3ea 16 | HDCJUYOXfppABimlVi5gOVf6r80wcuqTK6sIp+V+HVhAf2RbpAFnLWOSzkZ7Qaa9 17 | jZJ5Jd2nYTx+eOjkNZL2kiV6R9tvuJK0C9nQeJJDTwkmksLJEg+5CS6D51zdRgdc 18 | dCvvesmF6dWQmOxZdm3pqusTkIWNq2RBb2kEqZA84cfVLX4+OOhbieC9XKQjsOcE 19 | h+rsI/lmeuR9 20 | 
-----END CERTIFICATE----- 21 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy1/mitmproxy-ca.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/.mitm/proxy1/mitmproxy-ca.p12 -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy1/mitmproxy-dhparam.pem: -------------------------------------------------------------------------------- 1 | 2 | -----BEGIN DH PARAMETERS----- 3 | MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 4 | O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv 5 | j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ 6 | Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB 7 | chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC 8 | ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq 9 | o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX 10 | IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv 11 | A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 12 | 6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I 13 | rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= 14 | -----END DH PARAMETERS----- 15 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca-cert.cer: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDNTCCAh2gAwIBAgIUUo4sMqY4s3aM0RqjLhD1ZzGOhnowDQYJKoZIhvcNAQEL 3 | BQAwKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAltaXRtcHJveHkwHhcN 4 | MjQwMzE3MDQwNjA2WhcNMzQwMzE3MDQwNjA2WjAoMRIwEAYDVQQDDAltaXRtcHJv 5 | 
eHkxEjAQBgNVBAoMCW1pdG1wcm94eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 6 | AQoCggEBAJ/BEbexCoDvIEB2zF8R13tNVqc5eW5kS4Rp0IqKSfWmmrghA0bc6X22 7 | p6juusl1KSpoWcR1L0iD1Wa2Tlaip0c/DJUwJHwJ70UZyWjwAJPbF282dYqqwygC 8 | hWP1EFKVlctHE6MEMc+o1W7hLC690n0EKtatT5lCHSuUwK69RoNijfPqJrqstQKN 9 | hJZ9bDIHVwi86jUbUcfjb9Uo/AiMjAonuy82wiarHdNmRIIcRcBvXkhx7on/5X5z 10 | /Vq4+lgR91lP+6qYotHI988e4plF0KuzjrTPyki7+OiyJkMxJwJW/E1DU6bvTchN 11 | H9wB27kJ6GtFW21n1YqRWpCR7JyQ4D8CAwEAAaNXMFUwDwYDVR0TAQH/BAUwAwEB 12 | /zATBgNVHSUEDDAKBggrBgEFBQcDATAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE 13 | FBNhRsjEjijaA8rS3XezhrtEpVvRMA0GCSqGSIb3DQEBCwUAA4IBAQAc8wSUSk7y 14 | Sz4pQmi6EciZmU9jEnBHld9uYJ4mqRR2oPm+eRPq0yW1VifNEgMLSqNcv8/EH93o 15 | C16jHHQ5TrV0C+wMnnUN3BxliDsi6FdbMa92Df09K9C/LP/v68H4rtMaMskvOrHw 16 | k/r/NsKCxZ1GywLA7s/yVKgtr7ARARf6hHJS6/bxqohdaCFZtxmQIH26sOkTV2Ds 17 | pf1ey+d3xitOl/roLXV91KjGfML4PRCzIPOw0+odSw62e2kikI77OQxOEn4zjyg+ 18 | a0B344gMV7LaNTyqLTx41wU0hk62CeHHS4Gc0XLMfw9NYPTrjyQYK1+lEWDSEHCn 19 | TiBThXoIGeAU 20 | -----END CERTIFICATE----- 21 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca-cert.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca-cert.p12 -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca-cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDNTCCAh2gAwIBAgIUUo4sMqY4s3aM0RqjLhD1ZzGOhnowDQYJKoZIhvcNAQEL 3 | BQAwKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAltaXRtcHJveHkwHhcN 4 | MjQwMzE3MDQwNjA2WhcNMzQwMzE3MDQwNjA2WjAoMRIwEAYDVQQDDAltaXRtcHJv 5 | eHkxEjAQBgNVBAoMCW1pdG1wcm94eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 6 | 
AQoCggEBAJ/BEbexCoDvIEB2zF8R13tNVqc5eW5kS4Rp0IqKSfWmmrghA0bc6X22 7 | p6juusl1KSpoWcR1L0iD1Wa2Tlaip0c/DJUwJHwJ70UZyWjwAJPbF282dYqqwygC 8 | hWP1EFKVlctHE6MEMc+o1W7hLC690n0EKtatT5lCHSuUwK69RoNijfPqJrqstQKN 9 | hJZ9bDIHVwi86jUbUcfjb9Uo/AiMjAonuy82wiarHdNmRIIcRcBvXkhx7on/5X5z 10 | /Vq4+lgR91lP+6qYotHI988e4plF0KuzjrTPyki7+OiyJkMxJwJW/E1DU6bvTchN 11 | H9wB27kJ6GtFW21n1YqRWpCR7JyQ4D8CAwEAAaNXMFUwDwYDVR0TAQH/BAUwAwEB 12 | /zATBgNVHSUEDDAKBggrBgEFBQcDATAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE 13 | FBNhRsjEjijaA8rS3XezhrtEpVvRMA0GCSqGSIb3DQEBCwUAA4IBAQAc8wSUSk7y 14 | Sz4pQmi6EciZmU9jEnBHld9uYJ4mqRR2oPm+eRPq0yW1VifNEgMLSqNcv8/EH93o 15 | C16jHHQ5TrV0C+wMnnUN3BxliDsi6FdbMa92Df09K9C/LP/v68H4rtMaMskvOrHw 16 | k/r/NsKCxZ1GywLA7s/yVKgtr7ARARf6hHJS6/bxqohdaCFZtxmQIH26sOkTV2Ds 17 | pf1ey+d3xitOl/roLXV91KjGfML4PRCzIPOw0+odSw62e2kikI77OQxOEn4zjyg+ 18 | a0B344gMV7LaNTyqLTx41wU0hk62CeHHS4Gc0XLMfw9NYPTrjyQYK1+lEWDSEHCn 19 | TiBThXoIGeAU 20 | -----END CERTIFICATE----- 21 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/.mitm/proxy2/mitmproxy-ca.p12 -------------------------------------------------------------------------------- /tests/integration/proxy_config/.mitm/proxy2/mitmproxy-dhparam.pem: -------------------------------------------------------------------------------- 1 | 2 | -----BEGIN DH PARAMETERS----- 3 | MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 4 | O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv 5 | j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ 6 | Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB 7 | chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC 8 | 
ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq 9 | o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX 10 | IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv 11 | A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 12 | 6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I 13 | rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= 14 | -----END DH PARAMETERS----- 15 | -------------------------------------------------------------------------------- /tests/integration/proxy_config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/__init__.py -------------------------------------------------------------------------------- /tests/integration/proxy_config/logs/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinecone-io/pinecone-python-client/d1282ce5ec785fcc9f5342f6aa7fe68ac47684c9/tests/integration/proxy_config/logs/.gitkeep -------------------------------------------------------------------------------- /tests/perf/test_query_namespaces.py: -------------------------------------------------------------------------------- 1 | import time 2 | import random 3 | import pytest 4 | from pinecone import Pinecone 5 | 6 | latencies = [] 7 | 8 | 9 | def call_n_threads(index): 10 | query_vec = [random.random() for i in range(1024)] 11 | start = time.time() 12 | combined_results = index.query_namespaces( 13 | vector=query_vec, 14 | namespaces=["ns1", "ns2", "ns3", "ns4"], 15 | include_values=False, 16 | include_metadata=True, 17 | filter={"publication_date": {"$eq": "Last3Months"}}, 18 | top_k=1000, 19 | ) 20 | finish = time.time() 21 | # print(f"Query took {finish-start} seconds") 22 | latencies.append(finish - start) 23 | 24 | return 
def fake_results(i):
    """Build a synthetic query response for namespace ``ns{i}``.

    Produces 1000 matches, each with a random score and a 768-dim random
    values list, ordered by score descending to mimic a real query response.
    """
    matches = []
    for _ in range(1000):
        matches.append(
            {
                "id": f"id{i}",
                "score": random.random(),
                "values": [random.random() for _ in range(768)],
            }
        )
    return {
        "namespace": f"ns{i}",
        "matches": sorted(matches, key=lambda m: m["score"], reverse=True),
    }
from pinecone import Config
from pinecone.core.openapi.db_data.models import Vector


class TestDatetimeConversion:
    """Metadata strings that merely look like dates must stay plain strings."""

    def _vector_with_created_at(self, created_at):
        # _check_type=True presumably exercises the openapi type-validation
        # path where any datetime coercion would occur — TODO confirm.
        return Vector(
            id="1",
            values=[0.1, 0.2, 0.3],
            metadata={"created_at": created_at},
            _check_type=True,
            _configuration=Config(),
        )

    def test_datetimes_not_coerced(self):
        vec = self._vector_with_created_at("7th of January, 2023")
        assert vec.metadata["created_at"] == "7th of January, 2023"
        assert isinstance(vec.metadata["created_at"], str)

    def test_dates_not_coerced(self):
        vec = self._vector_with_created_at("8/12/2024")
        assert vec.metadata["created_at"] == "8/12/2024"
        assert isinstance(vec.metadata["created_at"], str)
import pytest


class TestIndexInstantiation:
    """Host validation performed when constructing an Index client."""

    def test_invalid_host(self):
        from pinecone import Pinecone

        pc = Pinecone(api_key="key")

        # Hosts with no recognizable structure are rejected with a helpful message.
        for bad_host in ("invalid", "my-index"):
            with pytest.raises(ValueError) as excinfo:
                pc.Index(host=bad_host)
            assert (
                f"You passed '{bad_host}' as the host but this does not appear to be valid"
                in str(excinfo.value)
            )

        # Can instantiate with realistic host
        pc.Index(host="test-bt8x3su.svc.apw5-4e34-81fa.pinecone.io")

        # Can instantiate with localhost address
        pc.Index(host="localhost:8080")
from pinecone import IndexEmbed, EmbedModel, Metric


def test_initialization_required_fields():
    """Only model and field_map are mandatory."""
    embed = IndexEmbed(model="test-model", field_map={"text": "my_text_field"})

    assert embed.model == "test-model"
    assert embed.field_map == {"text": "my_text_field"}


def test_initialization_with_optional_fields():
    """Optional metric/read/write parameters are stored as given."""
    kwargs = {
        "model": "test-model",
        "field_map": {"text": "my_text_field"},
        "metric": "cosine",
        "read_parameters": {"param1": "value1"},
        "write_parameters": {"param2": "value2"},
    }
    embed = IndexEmbed(**kwargs)

    for attr, expected in kwargs.items():
        assert getattr(embed, attr) == expected


def test_as_dict_method():
    """as_dict() round-trips every constructor argument."""
    kwargs = {
        "model": "test-model",
        "field_map": {"text": "my_text_field"},
        "metric": "cosine",
        "read_parameters": {"param1": "value1"},
        "write_parameters": {"param2": "value2"},
    }
    embed = IndexEmbed(**kwargs)

    assert embed.as_dict() == kwargs


def test_when_passed_enums():
    """Enum arguments are normalized to their string values."""
    embed = IndexEmbed(
        model=EmbedModel.Multilingual_E5_Large,
        field_map={"text": "my_text_field"},
        metric=Metric.COSINE,
    )

    assert embed.model == EmbedModel.Multilingual_E5_Large.value
    assert embed.field_map == {"text": "my_text_field"}
    assert embed.metric == Metric.COSINE.value
from pinecone.core.openapi.db_control.models import (
    IndexModel as OpenApiIndexModel,
    IndexModelStatus,
    IndexModelSpec,
    ServerlessSpec,
    DeletionProtection,
)
from pinecone.db_control.models import IndexModel
from pinecone import CloudProvider, AwsRegion


class TestIndexModel:
    """The IndexModel wrapper must expose the underlying OpenAPI model's fields."""

    def test_index_model(self):
        openapi_model = OpenApiIndexModel(
            name="test-index-1",
            dimension=2,
            metric="cosine",
            host="https://test-index-1.pinecone.io",
            status=IndexModelStatus(ready=True, state="Ready"),
            deletion_protection=DeletionProtection("enabled"),
            spec=IndexModelSpec(
                serverless=ServerlessSpec(
                    cloud=CloudProvider.AWS.value, region=AwsRegion.US_EAST_1.value
                )
            ),
        )

        wrapped = IndexModel(openapi_model)

        # Attribute access proxies through to the wrapped model.
        assert wrapped.name == "test-index-1"
        assert wrapped.dimension == 2
        assert wrapped.metric == "cosine"
        assert wrapped.host == "https://test-index-1.pinecone.io"
        # `is True` rather than `== True` (flake8 E712): asserts the field
        # really is the boolean singleton, not just any truthy value.
        assert wrapped.status.ready is True
        assert wrapped.status.state == "Ready"
        assert wrapped.deletion_protection == "enabled"

        # Dict-style access is supported as well.
        assert wrapped["name"] == "test-index-1"
import pytest
from pinecone import Pinecone


class TestLangchainErrorMessages:
    """Langchain-style classmethods on Pinecone should raise a helpful error."""

    @staticmethod
    def _expect_helpful_error(method_name, *args, **kwargs):
        # Attribute lookup (or the call) must raise with guidance pointing
        # users at the langchain integration instead.
        with pytest.raises(AttributeError) as excinfo:
            getattr(Pinecone, method_name)(*args, **kwargs)
        assert f"{method_name} is not a top-level attribute of the Pinecone class" in str(
            excinfo.value
        )

    def test_error_from_texts_positional_args(self):
        self._expect_helpful_error("from_texts", "texts", "id")

    def test_error_from_texts_kwargs(self):
        self._expect_helpful_error("from_texts", foo="texts", bar="id", num_threads=1)

    def test_error_from_documents(self):
        self._expect_helpful_error("from_documents", "documents", "id")
import re
import pinecone


def test_version():
    """The package must advertise a semver-like version string."""
    semver_pattern = re.compile(r"\d+\.\d+\.\d+")
    assert semver_pattern.search(pinecone.__version__) is not None
import pytest
import requests
from pinecone.utils import docslinks
from pinecone import __version__

urls = list(docslinks.values())


@pytest.mark.parametrize("url", urls)
def test_valid_links(url):
    """Every docs link the SDK can emit must resolve with HTTP 200.

    Entries in ``docslinks`` are either plain URL strings or callables that
    build a version-specific URL from the package version.
    """
    # requests.get has no default timeout; without one, a single unresponsive
    # host would hang the whole suite indefinitely.
    if isinstance(url, str):
        response = requests.get(url, timeout=30)
        assert response.status_code == 200, f"Docs link is invalid: {url}"
    else:
        versioned_url = url(__version__)
        response = requests.get(versioned_url, timeout=30)
        assert response.status_code == 200, f"Docs link is invalid: {versioned_url}"
def test_when_url_is_https():
    """An https URL is returned unchanged."""
    host = "https://index-name-abcdef.svc.pinecone.io"
    assert normalize_host(host) == host


def test_when_url_is_http():
    # This should not occur in prod, but if it does, we will leave it alone.
    # Could be useful when testing with local proxies.
    host = "http://index-name-abcdef.svc.pinecone.io"
    assert normalize_host(host) == host


def test_when_url_is_host_without_protocol():
    """A bare hostname is upgraded to https."""
    assert (
        normalize_host("index-name-abcdef.svc.pinecone.io")
        == "https://index-name-abcdef.svc.pinecone.io"
    )


def test_can_be_called_multiple_times():
    """Normalization is idempotent: applying it twice equals applying it once."""
    once = normalize_host("index-name-abcdef.svc.pinecone.io")
    assert normalize_host(once) == "https://index-name-abcdef.svc.pinecone.io"


# ---- tests/unit_grpc/conftest.py ----
import pytest


@pytest.fixture
def vector_dim():
    # Dimension shared by the dense-vector fixtures below.
    return 8


@pytest.fixture
def vals1(vector_dim):
    return [0.1] * vector_dim


@pytest.fixture
def vals2(vector_dim):
    return [0.2] * vector_dim


@pytest.fixture
def sparse_indices_1():
    return [1, 8, 42]


@pytest.fixture
def sparse_values_1():
    # Pairs positionally with sparse_indices_1.
    return [0.8, 0.9, 0.42]


@pytest.fixture
def sparse_indices_2():
    return [1, 3, 5]


@pytest.fixture
def sparse_values_2():
    # Pairs positionally with sparse_indices_2.
    return [0.7, 0.3, 0.31415]


@pytest.fixture
def md1():
    return {"genre": "action", "year": 2021}


@pytest.fixture
def md2():
    return {"genre": "documentary", "year": 2020}


@pytest.fixture
def filter1():
    # Metadata filter matching md1's genre.
    return {"genre": {"$in": ["action"]}}
@pytest.fixture
def filter2():
    # Metadata filter matching md2's year.
    return {"year": {"$eq": 2020}}


# ---- tests/unit_grpc/test_grpc_index_describe_index_stats.py ----
from pinecone import Config
from pinecone.grpc import GRPCIndex
from pinecone.core.grpc.protos.db_data_2025_01_pb2 import DescribeIndexStatsRequest
from pinecone.grpc.utils import dict_to_proto_struct


class TestGrpcIndexDescribeIndexStats:
    """Asserts describe_index_stats builds the exact gRPC request it sends."""

    def setup_method(self):
        # _endpoint_override keeps the client from contacting a real host.
        self.config = Config(api_key="test-api-key", host="foo.pinecone.io")
        self.index = GRPCIndex(
            config=self.config, index_name="example-name", _endpoint_override="test-endpoint"
        )

    def test_describeIndexStats_callWithoutFilter_CalledWithoutFilter(self, mocker):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.describe_index_stats()
        # No filter supplied -> an empty request with the default (None) timeout.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.DescribeIndexStats, DescribeIndexStatsRequest(), timeout=None
        )

    def test_describeIndexStats_callWithFilter_CalledWithFilter(self, mocker, filter1):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.describe_index_stats(filter=filter1)
        # The dict filter is converted to a protobuf Struct before the RPC.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.DescribeIndexStats,
            DescribeIndexStatsRequest(filter=dict_to_proto_struct(filter1)),
            timeout=None,
        )


# ---- tests/unit_grpc/test_grpc_index_fetch.py (continues in next chunk) ----
from pinecone import Config
from pinecone.grpc import GRPCIndex
from pinecone.core.grpc.protos.db_data_2025_01_pb2 import FetchRequest


class TestGrpcIndexFetch:
    """Asserts fetch builds the exact gRPC request it sends."""

    def setup_method(self):
        self.config = Config(api_key="test-api-key", host="foo.pinecone.io")
        # _endpoint_override keeps the client from contacting a real host.
        self.index = GRPCIndex(
            config=self.config, index_name="example-name", _endpoint_override="test-endpoint"
        )

    def test_fetch_byIds_fetchByIds(self, mocker):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.fetch(["vec1", "vec2"])
        # Default call: no namespace in the proto, default (None) timeout.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.Fetch, FetchRequest(ids=["vec1", "vec2"]), timeout=None
        )

    def test_fetch_byIdsAndNS_fetchByIdsAndNS(self, mocker):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.fetch(["vec1", "vec2"], namespace="ns", timeout=30)
        # namespace is carried in the request proto; timeout is passed to the runner.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.Fetch, FetchRequest(ids=["vec1", "vec2"], namespace="ns"), timeout=30
        )


# ---- tests/unit_grpc/test_grpc_index_update.py (continues in next chunk) ----
from pinecone import Config
from pinecone.grpc import GRPCIndex
from pinecone.core.grpc.protos.db_data_2025_01_pb2 import UpdateRequest
from pinecone.grpc.utils import dict_to_proto_struct


class TestGrpcIndexUpdate:
    """Asserts update builds the exact gRPC request it sends."""

    def setup_method(self):
        # _endpoint_override keeps the client from contacting a real host.
        self.config = Config(api_key="test-api-key", host="foo.pinecone.io")
        self.index = GRPCIndex(
            config=self.config, index_name="example-name", _endpoint_override="test-endpoint"
        )

    def test_update_byIdAnValues_updateByIdAndValues(self, mocker, vals1):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.update(id="vec1", values=vals1, namespace="ns", timeout=30)
        self.index.runner.run.assert_called_once_with(
            self.index.stub.Update,
            UpdateRequest(id="vec1", values=vals1, namespace="ns"),
            timeout=30,
        )

    def test_update_byIdAnValuesAsync_updateByIdAndValuesAsync(self, mocker, vals1):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.update(id="vec1", values=vals1, namespace="ns", timeout=30, async_req=True)
        # async_req=True dispatches via the .future variant of the stub method.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.Update.future,
            UpdateRequest(id="vec1", values=vals1, namespace="ns"),
            timeout=30,
        )

    def test_update_byIdAnValuesAndMetadata_updateByIdAndValuesAndMetadata(
        self, mocker, vals1, md1
    ):
        mocker.patch.object(self.index.runner, "run", autospec=True)
        self.index.update("vec1", values=vals1, set_metadata=md1)
        # The metadata dict is converted to a protobuf Struct; timeout defaults to None.
        self.index.runner.run.assert_called_once_with(
            self.index.stub.Update,
            UpdateRequest(id="vec1", values=vals1, set_metadata=dict_to_proto_struct(md1)),
            timeout=None,
        )


# ---- tests/upgrade/test_all.py (continues in next chunk) ----
class TestAll:
    def test_all_is_complete(self):
        """Test that __all__ is complete and accurate."""
        # Import the module
        import pinecone

        # Get all public names (those that don't start with _)
        public_names = {name for name in dir(pinecone) if not name.startswith("_")}

        # Get __all__ if it exists, otherwise empty set
        all_names = set(getattr(pinecone, "__all__", []))

        # Check that __all__ exists
        assert hasattr(pinecone, "__all__"), "Module should have __all__ defined"

        # Check that all names in __all__ are actually importable
        for name in all_names:
            assert getattr(pinecone, name) is not None, f"Name {name} in __all__ is not importable"

        # Check that all public names are in __all__
        missing_from_all = public_names - all_names
        for name in missing_from_all:
            # Printed so the full set is visible in the pytest failure output.
            print(f"Public name {name} is not in __all__")
        assert not missing_from_all, f"Public names not in __all__: {missing_from_all}"

        # Check that __all__ doesn't contain any private names
        private_in_all = {name for name in
all_names if name.startswith("_")} 28 | assert not private_in_all, f"Private names in __all__: {private_in_all}" 29 | -------------------------------------------------------------------------------- /tests/upgrade/test_reorganization.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class TestReorganization: 5 | def test_data(self): 6 | with pytest.warns(DeprecationWarning) as warning_info: 7 | from pinecone.data import Index 8 | 9 | assert Index is not None 10 | assert len(warning_info) > 0 11 | assert "has moved to" in str(warning_info[0].message) 12 | 13 | def test_config(self): 14 | with pytest.warns(DeprecationWarning) as warning_info: 15 | from pinecone.config import PineconeConfig 16 | 17 | assert PineconeConfig is not None 18 | assert len(warning_info) > 0 19 | assert "has moved to" in str(warning_info[0].message) 20 | --------------------------------------------------------------------------------