├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── config.yml
│   │   ├── documentation.md
│   │   └── feature_request.md
│   ├── actions
│   │   └── bump-version
│   │       ├── .gitignore
│   │       ├── action.js
│   │       ├── action.test.js
│   │       ├── action.yaml
│   │       ├── core.js
│   │       ├── index.js
│   │       ├── jest.config.json
│   │       ├── package-lock.json
│   │       └── package.json
│   └── workflows
│       ├── add-labels.yaml
│       ├── ci.yaml
│       └── release.yaml
├── .gitignore
├── .gitmodules
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── codegen
│   └── build-clients.sh
├── go.mod
├── go.sum
├── internal
│   ├── gen
│   │   ├── api_version.go
│   │   ├── db_control
│   │   │   └── db_control_2025-01.oas.go
│   │   ├── db_data
│   │   │   ├── grpc
│   │   │   │   ├── db_data_2025-01.pb.go
│   │   │   │   └── db_data_2025-01_grpc.pb.go
│   │   │   └── rest
│   │   │       └── db_data_2025-01.oas.go
│   │   └── inference
│   │       └── inference_2025-01.oas.go
│   ├── provider
│   │   ├── header.go
│   │   └── header_test.go
│   ├── useragent
│   │   ├── useragent.go
│   │   └── useragent_test.go
│   ├── utils
│   │   ├── interceptors.go
│   │   └── mocks.go
│   └── version.go
├── justfile
└── pinecone
    ├── client.go
    ├── client_test.go
    ├── errors.go
    ├── index_connection.go
    ├── index_connection_test.go
    ├── local_test.go
    ├── models.go
    ├── models_test.go
    ├── suite_runner_test.go
    └── test_suite.go

/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
---
name: Bug report
about: Create a report to help us improve
title: "[Bug] "
labels: bug
assignees: ''

---

**Is this a new bug?**
In other words: Is this an error, flaw, failure or fault? Please search Github issues and check our [Community Forum](https://community.pinecone.io/) to see if someone has already reported the bug you encountered.

If this is a request for help or troubleshooting code in your own Pinecone project, please join the [Pinecone Community Forum](https://community.pinecone.io/).

- [ ] I believe this is a new bug
- [ ] I have searched the existing Github issues and Community Forum, and I could not find an existing post for this bug

**Describe the bug**
Describe the functionality that was working before but is broken now.

**Error information**
If you have one, please include the full stack trace here. If not, please share as much as you can about the error.

**Steps to reproduce the issue locally**
Include steps to reproduce the issue here. If you have sample code or a script that can be used to replicate this issue, please include that as well (including any dependent files to run the code).

**Environment**
* OS Version:
* Go version:
* Go SDK version:

**Additional context**
Add any other context about the problem here.

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:

blank_issues_enabled: false
contact_links:
  - name: Pinecone Community Forum
    url: https://community.pinecone.io/
    about: For support, please see the community forum.
6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Documentation 3 | about: Report an issue in our docs 4 | title: "[Docs] " 5 | labels: 'documentation' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Description** 11 | Describe the issue that you've encountered with our documentation. 12 | 13 | **Suggested solution** 14 | Describe how this issue could be fixed or improved. 15 | **Link to page** 16 | Add a link to the exact documentation page where the issue occurred. 17 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[Feature Request]" 5 | labels: 'enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **What motivated you to submit this feature request?** 11 | A clear and concise description of why you are requesting this feature - e.g. "Being able to do x would allow me to..." 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/actions/bump-version/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ -------------------------------------------------------------------------------- /.github/actions/bump-version/action.js: -------------------------------------------------------------------------------- 1 | const core = require("./core"); 2 | 3 | function bumpVersion(currentVersion, bumpType, prerelease) { 4 | let newVersion = calculateNewVersion(currentVersion, bumpType); 5 | 6 | if (prerelease) { 7 | newVersion = `${newVersion}.${prerelease}`; 8 | } 9 | core.setOutput("previous_version", currentVersion); 10 | core.setOutput("previous_version_tag", `v${currentVersion}`); 11 | core.setOutput("version", newVersion); 12 | core.setOutput("version_tag", `v${newVersion}`); 13 | } 14 | 15 | function calculateNewVersion(currentVersion, bumpType) { 16 | const [major, minor, patch] = currentVersion.split("."); 17 | let newVersion; 18 | 19 | switch (bumpType) { 20 | case "major": 21 | newVersion = `${parseInt(major) + 1}.0.0`; 22 | break; 23 | case "minor": 24 | newVersion = `${major}.${parseInt(minor) + 1}.0`; 25 | break; 26 | case "patch": 27 | newVersion = `${major}.${minor}.${parseInt(patch) + 1}`; 28 | break; 29 | default: 30 | throw new Error(`Invalid bumpType: ${bumpType}`); 31 | } 32 | 33 | return newVersion; 34 | } 35 | 36 | module.exports = { bumpVersion }; 37 | -------------------------------------------------------------------------------- /.github/actions/bump-version/action.test.js: -------------------------------------------------------------------------------- 1 | const action = require("./action"); 2 | const core = require("./core"); 3 | 4 | jest.mock("./core"); 5 | 6 | describe("bump-version", () => { 7 | test("bump major", () => { 8 | action.bumpVersion("1.2.3", "major", ""); 9 | 10 | 
expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.3"); 11 | expect(core.setOutput).toHaveBeenCalledWith( 12 | "previous_version_tag", 13 | "v1.2.3" 14 | ); 15 | expect(core.setOutput).toHaveBeenCalledWith("version", "2.0.0"); 16 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v2.0.0"); 17 | }); 18 | 19 | test("bump minor: existing minor and patch", () => { 20 | action.bumpVersion("1.2.3", "minor", ""); 21 | 22 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.3"); 23 | expect(core.setOutput).toHaveBeenCalledWith( 24 | "previous_version_tag", 25 | "v1.2.3" 26 | ); 27 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.3.0"); 28 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.3.0"); 29 | }); 30 | 31 | test("bump minor: with no patch", () => { 32 | action.bumpVersion("1.2.0", "minor", ""); 33 | 34 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.0"); 35 | expect(core.setOutput).toHaveBeenCalledWith( 36 | "previous_version_tag", 37 | "v1.2.0" 38 | ); 39 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.3.0"); 40 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.3.0"); 41 | }); 42 | 43 | test("bump minor: from existing patch", () => { 44 | action.bumpVersion("2.2.3", "minor", ""); 45 | 46 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "2.2.3"); 47 | expect(core.setOutput).toHaveBeenCalledWith( 48 | "previous_version_tag", 49 | "v2.2.3" 50 | ); 51 | expect(core.setOutput).toHaveBeenCalledWith("version", "2.3.0"); 52 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v2.3.0"); 53 | }); 54 | 55 | test("bump patch: existing patch", () => { 56 | action.bumpVersion("1.2.3", "patch", ""); 57 | 58 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.3"); 59 | expect(core.setOutput).toHaveBeenCalledWith( 60 | "previous_version_tag", 61 | "v1.2.3" 62 | ); 63 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.2.4"); 64 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.2.4"); 65 | }); 66 | 67 | test("bump patch: minor with no patch", () => { 68 | action.bumpVersion("1.2.0", "patch", ""); 69 | 70 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.0"); 71 | expect(core.setOutput).toHaveBeenCalledWith( 72 | "previous_version_tag", 73 | "v1.2.0" 74 | ); 75 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.2.1"); 76 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.2.1"); 77 | }); 78 | 79 | test("bump patch: major with no minor or patch", () => { 80 | action.bumpVersion("1.0.0", "patch", ""); 81 | 82 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.0.0"); 83 | expect(core.setOutput).toHaveBeenCalledWith( 84 | "previous_version_tag", 85 | "v1.0.0" 86 | ); 87 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.0.1"); 88 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.0.1"); 89 | }); 90 | 91 | test("bump patch: major with minor", () => { 92 | action.bumpVersion("1.1.0", "patch", ""); 93 | 94 | expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.1.0"); 95 | expect(core.setOutput).toHaveBeenCalledWith( 96 | "previous_version_tag", 97 | "v1.1.0" 98 | ); 99 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.1.1"); 100 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.1.1"); 101 | }); 102 | 103 | test("prerelease suffix provided", () => { 104 | action.bumpVersion("1.2.3", "patch", "rc1"); 105 | 106 
| expect(core.setOutput).toHaveBeenCalledWith("previous_version", "1.2.3"); 107 | expect(core.setOutput).toHaveBeenCalledWith( 108 | "previous_version_tag", 109 | "v1.2.3" 110 | ); 111 | expect(core.setOutput).toHaveBeenCalledWith("version", "1.2.4.rc1"); 112 | expect(core.setOutput).toHaveBeenCalledWith("version_tag", "v1.2.4.rc1"); 113 | }); 114 | }); 115 | -------------------------------------------------------------------------------- /.github/actions/bump-version/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'pinecone-io/bump-version' 2 | 3 | description: 'Bumps a given semantic version number based on a bumpType and prereleaseSuffix' 4 | 5 | inputs: 6 | currentVersion: 7 | description: 'The current version of the client to bump from' 8 | required: true 9 | bumpType: 10 | description: 'The type of version bump (major, minor, patch)' 11 | required: true 12 | prereleaseSuffix: 13 | description: 'Optional prerelease identifier to append to the version number' 14 | required: false 15 | default: '' 16 | 17 | outputs: 18 | version: 19 | description: 'The new version number' 20 | version_tag: 21 | description: 'The new version tag' 22 | previous_version: 23 | description: 'The previous version number' 24 | previous_version_tag: 25 | description: 'The previous version tag' 26 | 27 | runs: 28 | using: 'node20' 29 | main: 'index.js' -------------------------------------------------------------------------------- /.github/actions/bump-version/core.js: -------------------------------------------------------------------------------- 1 | // Copied these commands out of the github actions toolkit 2 | // because actually depending on @actions/core requires me to check 3 | // in node_modules and 34MB of dependencies, which I don't want to do. 4 | 5 | const fs = require("fs"); 6 | const os = require("os"); 7 | 8 | function getInput(name, options) { 9 | const val = 10 | process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; 11 | if (options && options.required && !val) { 12 | throw new Error(`Input required and not supplied: ${name}`); 13 | } 14 | 15 | if (options && options.trimWhitespace === false) { 16 | return val; 17 | } 18 | 19 | return val.trim(); 20 | } 21 | 22 | function toCommandValue(input) { 23 | if (input === null || input === undefined) { 24 | return ""; 25 | } else if (typeof input === "string" || input instanceof String) { 26 | return input; 27 | } 28 | return JSON.stringify(input); 29 | } 30 | 31 | function prepareKeyValueMessage(key, value) { 32 | const delimiter = `delimiter_${Math.floor(Math.random() * 100000)}`; 33 | const convertedValue = toCommandValue(value); 34 | 35 | // These should realistically never happen, but just in case someone finds a 36 | // way to exploit uuid generation let's not allow keys or values that contain 37 | // the delimiter. 
  if (key.includes(delimiter)) {
    throw new Error(
      `Unexpected input: name should not contain the delimiter "${delimiter}"`
    );
  }

  if (convertedValue.includes(delimiter)) {
    throw new Error(
      `Unexpected input: value should not contain the delimiter "${delimiter}"`
    );
  }

  return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}

function setOutput(name, value) {
  const filePath = process.env["GITHUB_OUTPUT"] || "";
  if (filePath) {
    return issueFileCommand("OUTPUT", prepareKeyValueMessage(name, value));
  }

  process.stdout.write(os.EOL);
  issueCommand("set-output", { name }, toCommandValue(value));
}

// Minimal stand-in for the toolkit's issueCommand, so the legacy fallback in
// setOutput above actually works when GITHUB_OUTPUT is unset. Emits the old
// workflow-command syntax, e.g. `::set-output name=version::1.2.3`.
// (The toolkit version also escapes the payload; that is omitted here.)
function issueCommand(command, properties, message) {
  const props = Object.keys(properties || {})
    .map((k) => `${k}=${toCommandValue(properties[k])}`)
    .join(",");
  process.stdout.write(
    `::${command}${props ? " " + props : ""}::${toCommandValue(message)}${os.EOL}`
  );
}

function issueFileCommand(command, message) {
  const filePath = process.env[`GITHUB_${command}`];
  if (!filePath) {
    throw new Error(
      `Unable to find environment variable for file command ${command}`
    );
  }
  if (!fs.existsSync(filePath)) {
    throw new Error(`Missing file at path: ${filePath}`);
  }

  fs.appendFileSync(filePath, `${toCommandValue(message)}${os.EOL}`, {
    encoding: "utf8",
  });
}

module.exports = { getInput, setOutput };

--------------------------------------------------------------------------------
/.github/actions/bump-version/index.js:

const action = require("./action");
const core = require("./core");

action.bumpVersion(
  core.getInput("currentVersion"),
  core.getInput("bumpType"),
  core.getInput("prereleaseSuffix")
);

--------------------------------------------------------------------------------
/.github/actions/bump-version/jest.config.json:

{
  "verbose": true
}

--------------------------------------------------------------------------------
/.github/actions/bump-version/package.json:

{
  "name": "bump-version",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "jest"
  },
  "author": "",
  "license": "MIT",
  "devDependencies": {
    "jest": "^29.5.0"
  }
}

--------------------------------------------------------------------------------
/.github/workflows/add-labels.yaml:

name: Label issues
on:
  issues:
    types:
      - reopened
      - opened
jobs:
  label_issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    steps:
      - run: gh issue edit "$NUMBER" --add-label "$LABELS"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
          NUMBER: ${{ github.event.issue.number }}
          LABELS: status:needs-triage

--------------------------------------------------------------------------------
/.github/workflows/ci.yaml:

name: ci
on:
  pull_request: {}

jobs:
  build:
    runs-on: ubuntu-latest
    services:
      pc-index-serverless:
        image: ghcr.io/pinecone-io/pinecone-index:latest
        ports:
          - 5081:5081
        env:
          PORT: 5081
          DIMENSION: 1536
          METRIC: dotproduct
          INDEX_TYPE: serverless
      pc-index-pod:
        image:
ghcr.io/pinecone-io/pinecone-index:latest 20 | ports: 21 | - 5082:5082 22 | env: 23 | PORT: 5082 24 | DIMENSION: 1536 25 | METRIC: cosine 26 | INDEX_TYPE: pod 27 | steps: 28 | - uses: actions/checkout@v4 29 | - name: Setup Go 30 | uses: actions/setup-go@v5 31 | with: 32 | go-version: "1.21.x" 33 | - name: Install dependencies 34 | run: | 35 | go get ./pinecone 36 | - name: Run tests 37 | run: go test -count=1 -v ./pinecone 38 | env: 39 | PINECONE_API_KEY: ${{ secrets.API_KEY }} 40 | - name: Run local integration tests 41 | run: go test -count=1 -v ./pinecone -run TestRunLocalIntegrationSuite -tags=localServer 42 | env: 43 | PINECONE_INDEX_URL_POD: http://localhost:5082 44 | PINECONE_INDEX_URL_SERVERLESS: http://localhost:5081 45 | PINECONE_DIMENSION: 1536 46 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release Client 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | ref: 7 | description: "Git ref to tag and release" 8 | required: true 9 | type: string 10 | default: "main" 11 | releaseLevel: 12 | description: "Release level (major, minor, patch)" 13 | required: true 14 | type: choice 15 | default: "patch" 16 | options: 17 | - "patch" # bug fixes 18 | - "minor" # new features, backwards compatible 19 | - "major" # breaking changes 20 | isPrerelease: 21 | description: "Whether this is a prerelease (alpha / beta) client" 22 | required: true 23 | type: boolean 24 | default: true 25 | prereleaseSuffix: 26 | description: "Suffix to add to version number for marking as a pre-release alpha or beta client. Value ignored when isPrerelease is false" 27 | required: false 28 | type: string 29 | default: "" 30 | 31 | jobs: 32 | bump-version-and-release: 33 | runs-on: ubuntu-latest 34 | env: 35 | SEMVER_VERSION: "" # Set by the get_tag_version step 36 | PRERELEASE_SUFFIX: "" # Set by the set_prerelease_suffix step 37 | steps: 38 | - name: Checkout 39 | uses: actions/checkout@v4 40 | with: 41 | fetch-depth: 0 42 | ref: ${{ inputs.ref }} 43 | 44 | - name: Verify prereleaseSuffix not empty if isPrerelease is true 45 | if: ${{ inputs.isPrerelease == true }} 46 | run: | 47 | if [ -z "${{ inputs.prereleaseSuffix }}" ]; then 48 | echo "prereleaseSuffix cannot be empty if isPrerelease is true" 49 | exit 1 50 | fi 51 | 52 | - name: Extract current release version through tag and set SEMVER_VERSION 53 | id: get_tag_version 54 | run: | 55 | tag=$(git describe --tags --abbrev=0) 56 | semver=${tag#v} # Remove the 'v' prefix from version number 57 | echo "Current Released Version: $semver" 58 | echo "SEMVER_VERSION=$semver" >> $GITHUB_ENV # Set as environment variable 59 | 60 | - name: Set PRERELEASE_SUFFIX if isPrerelease is true 61 | id: set_prerelease_suffix 62 | if: ${{ inputs.isPrerelease == true }} 63 | run: | 64 | echo "PRERELEASE_SUFFIX=${{ inputs.prereleaseSuffix }}" >> $GITHUB_ENV 65 | 66 | - name: Bump version 67 | id: bump 68 | uses: "./.github/actions/bump-version" 69 | with: 70 | currentVersion: ${{ env.SEMVER_VERSION}} 71 | bumpType: ${{ inputs.releaseLevel }} 72 | prereleaseSuffix: ${{ env.PRERELEASE_SUFFIX }} 73 | 74 | - name: Verify unique release tag 75 | run: | 76 | TAG_NAME=${{ steps.bump.outputs.VERSION_TAG}} 77 | if git rev-parse -q --verify "refs/tags/$TAG_NAME" >/dev/null; then 78 | echo "Tag $TAG_NAME already exists" 79 | exit 1 80 | fi 81 | 82 | - name: Update version.go file with new version tag 83 | run: | 84 | echo "Updating 
version.go file with new version tag" 85 | sed -i "s/Version = \".*\"/Version = \"${{ steps.bump.outputs.VERSION_TAG }}\"/" internal/version.go 86 | 87 | - name: Set up Git 88 | run: | 89 | git config --global user.name "Pinecone CI" 90 | git config --global user.email "clients@pinecone.io" 91 | 92 | - name: Discard changes to version file if prerelease 93 | if: ${{ inputs.isPrerelease == true }} 94 | run: | 95 | git checkout -- internal/version.go 96 | 97 | - name: Commit version.go changes if not prerelease 98 | if: ${{ inputs.isPrerelease == false}} 99 | run: | 100 | git add internal/version.go 101 | git commit -m "[skip ci] Bump version to ${{ steps.bump.outputs.VERSION_TAG }}" 102 | 103 | - name: Tag version 104 | run: | 105 | newVersionTag="${{ steps.bump.outputs.VERSION_TAG }}" 106 | git tag -a $newVersionTag -m "Release $newVersionTag" 107 | 108 | - name: Push tags (prerelease) 109 | if: ${{ inputs.isPrerelease == true }} 110 | # We don't push the version.go for prerelease so the tag wouldn't be pushed 111 | # Push just tags up here in that case since Go releases are handled via git tags 112 | run: git push --tags 113 | 114 | - name: Push tags and updated version.go file (production release) 115 | if: ${{ inputs.isPrerelease == false }} 116 | run: git push --follow-tags 117 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/go 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=go 3 | 4 | ### Go ### 5 | # Binaries for programs and plugins 6 | *.exe 7 | *.exe~ 8 | *.dll 9 | *.so 10 | *.dylib 11 | 12 | # Test binary, built with `go test -c` 13 | *.test 14 | 15 | # Output of the go coverage tool, specifically when used with LiteIDE 16 | *.out 17 | 18 | # Dependency directories (remove the comment below to include it) 19 | # vendor/ 20 | 21 | ### Go Patch ### 22 | /vendor/ 23 | /Godeps/ 24 | 25 | # End of https://www.toptal.com/developers/gitignore/api/go 26 | 27 | # Created by https://www.toptal.com/developers/gitignore/api/intellij+all 28 | # Edit at https://www.toptal.com/developers/gitignore?templates=intellij+all 29 | 30 | ### Intellij+all ### 31 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 32 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 33 | 34 | # User-specific stuff 35 | .idea/**/workspace.xml 36 | .idea/**/tasks.xml 37 | .idea/**/usage.statistics.xml 38 | .idea/**/dictionaries 39 | .idea/**/shelf 40 | 41 | # Generated files 42 | .idea/**/contentModel.xml 43 | 44 | # Sensitive or high-churn files 45 | .idea/**/dataSources/ 46 | .idea/**/dataSources.ids 47 | .idea/**/dataSources.local.xml 48 | .idea/**/sqlDataSources.xml 49 | .idea/**/dynamic.xml 50 | .idea/**/uiDesigner.xml 51 | .idea/**/dbnavigator.xml 52 | 53 | # Gradle 54 | .idea/**/gradle.xml 55 | .idea/**/libraries 56 | 57 | # Gradle and Maven with auto-import 58 | # When using Gradle or Maven with auto-import, you should exclude module files, 59 | # since they will be recreated, and may cause churn. Uncomment if using 60 | # auto-import. 
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

### Intellij+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360

.idea/

# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023

*.iml
modules.xml
.idea/misc.xml
*.ipr

# Sonarlint plugin
.idea/sonarlint

# End of https://www.toptal.com/developers/gitignore/api/intellij+all

go.work*
.env

--------------------------------------------------------------------------------
/.gitmodules:

[submodule "codegen/apis"]
	path = codegen/apis
	url = git@github.com:pinecone-io/apis.git

--------------------------------------------------------------------------------
/CONTRIBUTING.md:

# Contributing

## Prereqs

1. A [current version of Go](https://go.dev/doc/install) (recommended 1.21+)
2. The [just](https://github.com/casey/just?tab=readme-ov-file#installation) command runner
3. The [protobuf-compiler](https://grpc.io/docs/protoc-installation/)

Then, execute `just bootstrap` to install the necessary Go packages. The packages installed when bootstrapping allow regenerating client code from spec files, and generating Go documentation.

## Environment Setup

At a minimum, you will need to declare a `PINECONE_API_KEY` variable in your environment in order to interact with Pinecone services and run integration tests locally. If `PINECONE_API_KEY` is available in your environment, the `Client` struct can be created with `NewClient` without any additional configuration parameters. Alternatively, you can pass `ApiKey` as a configuration directly through `NewClientParams`. A construction sketch is shown at the end of this section.

### API Definitions submodule

The API Definitions are in a private submodule. To check out or update the submodules, execute the following command in the root of the project:

```shell
git submodule update --init --recursive
```

For working with submodules, see the [Git Submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules)
documentation. Note that since the current submodule is private to `pinecone-io`, you will not be able to work directly
with the submodule.
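
To tie the Environment Setup guidance together, here is a minimal sketch of both construction paths. It assumes only what is described above: `NewClient` falls back to `PINECONE_API_KEY` when no `ApiKey` is passed, and the `ListIndexes` call (also used by the integration suite) serves as an illustrative smoke test.

```Go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/pinecone-io/go-pinecone/v3/pinecone"
)

func main() {
	// Option 1: rely on PINECONE_API_KEY being set in the environment.
	client, err := pinecone.NewClient(pinecone.NewClientParams{})
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}

	// Option 2: pass the key explicitly instead.
	// client, err := pinecone.NewClient(pinecone.NewClientParams{ApiKey: "your-api-key"})

	// Smoke test: list the indexes the key can see.
	indexes, err := client.ListIndexes(context.Background())
	if err != nil {
		log.Fatalf("failed to list indexes: %v", err)
	}
	fmt.Printf("found %d indexes\n", len(indexes))
}
```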
## Just commands

`just test` : Executes all tests (unit & integration) for the pinecone package

`just test-unit` : Executes unit tests only for the pinecone package

`just gen` : Generates Go client code from the API definitions

`just docs` : Generates Go docs and starts an HTTP server on localhost

`just bootstrap` : Installs the Go packages necessary for gen and docs

## Testing

The `go-pinecone` codebase includes both unit and integration tests. These tests are kept within the same files, but are constructed differently. See `/pinecone/index_connection_test.go` and `/pinecone/client_test.go` for examples. They are divided into sections with `// Integration tests:` near the top, and `// Unit tests:` near the bottom of the file.

For running tests you can use `just test` to run all tests, and `just test-unit` to run only unit tests.

### Unit tests

Unit tests are generally written using Go's built-in testing support. You can find a [brief walkthrough](https://go.dev/doc/tutorial/add-a-test) detailing how to write a test. You can also refer to [go.dev/doc/code#Testing](https://go.dev/doc/code#Testing).

When adding unit tests, make sure to add the suffix `"Unit"` to the test function name so the test is picked up by the `just test-unit` command. For example:

```Go
func TestNewClientParamsSetUnit(t *testing.T) {
	apiKey := "test-api-key"
	client, err := NewClient(NewClientParams{ApiKey: apiKey})

	require.NoError(t, err)
	require.Empty(t, client.sourceTag, "Expected client to have empty sourceTag")
	require.NotNil(t, client.headers, "Expected client headers to not be nil")
	apiKeyHeader, ok := client.headers["Api-Key"]
	require.True(t, ok, "Expected client to have an 'Api-Key' header")
	require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey")
	require.Equal(t, 3, len(client.restClient.RequestEditors), "Expected client to have correct number of request editors")
}
```

### Integration Tests

For integration tests we use the `stretchr/testify` module, specifically the `suite`, `assert`, and `require` packages. You can find the source code and documentation on GitHub: [https://github.com/stretchr/testify](https://github.com/stretchr/testify).

There are two files that define the integration test suite, and include code that manages setup and teardown of external Index resources before and after the integration suites execute.

- `./pinecone/test_suite.go`
- `./pinecone/suite_runner_test.go`

`test_suite.go` includes the definition of the `IntegrationTests` struct, which embeds `suite.Suite` from testify. This file also includes `SetupSuite` and `TearDownSuite` methods, along with utility functions for things like index creation and upserting vectors.

`suite_runner_test.go` is the primary entry point for the integration tests being run:

```Go
// This is the entry point for all integration tests
// This test function is picked up by go test and triggers the suite runs
func TestRunSuites(t *testing.T) {
	RunSuites(t)
}
```

In `RunSuites` we create two different `IntegrationTests` suites for pod and serverless indexes.

As mentioned above, integration tests are written in the same files as unit tests.
However, integration tests must be defined as methods on the `IntegrationTests` struct: 92 | 93 | ```Go 94 | type IntegrationTests struct { 95 | suite.Suite 96 | apiKey string 97 | client *Client 98 | host string 99 | dimension int32 100 | indexType string 101 | vectorIds []string 102 | idxName string 103 | idxConn *IndexConnection 104 | collectionName string 105 | sourceTag string 106 | } 107 | 108 | // Integration tests: 109 | func (ts *IntegrationTests) TestListIndexes() { 110 | indexes, err := ts.client.ListIndexes(context.Background()) 111 | require.NoError(ts.T(), err) 112 | require.Greater(ts.T(), len(indexes), 0, "Expected at least one index to exist") 113 | } 114 | ``` 115 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /codegen/build-clients.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | version=$1 # e.g. 
2024-07 4 | 5 | # modules 6 | db_control_module="db_control" 7 | db_data_module="db_data" 8 | inference_module="inference" 9 | 10 | # generated grpc output destination paths 11 | # db_data_destination must align with the option go_package in the proto file: 12 | # https://github.com/pinecone-io/apis/blob/d1d005e75cc9fe9a5c486ef9218fe87b57765961/src/release/db/data/data.proto#L3 13 | db_data_destination="internal/gen/${db_data_module}" 14 | db_control_destination="internal/gen/${db_control_module}" 15 | inference_destination="internal/gen/${inference_module}" 16 | 17 | # version file 18 | version_file="internal/gen/api_version.go" 19 | 20 | # generated oas file destination paths 21 | db_data_rest_destination="${db_data_destination}/rest" 22 | db_data_oas_file="${db_data_rest_destination}/${db_data_module}_${version}.oas.go" 23 | db_control_oas_file="${db_control_destination}/${db_control_module}_${version}.oas.go" 24 | inference_oas_file="${inference_destination}/${inference_module}_${version}.oas.go" 25 | 26 | set -eux -o pipefail 27 | 28 | update_apis_repo() { 29 | echo "Updating apis repo" 30 | pushd codegen/apis 31 | git fetch 32 | git checkout main 33 | git pull 34 | just clean 35 | just build 36 | popd 37 | } 38 | 39 | verify_spec_version() { 40 | local version=$1 41 | echo "Verifying spec version has been provided: $version" 42 | if [ -z "$version" ]; then 43 | echo "Version is required" 44 | exit 1 45 | fi 46 | 47 | verify_directory_exists "codegen/apis/_build/${version}" 48 | } 49 | 50 | verify_directory_exists() { 51 | local directory=$1 52 | if [ ! -d "$directory" ]; then 53 | echo "Directory does not exist at $directory" 54 | exit 1 55 | fi 56 | } 57 | 58 | generate_oas_client() { 59 | local module=$1 60 | local destination=$2 61 | 62 | # source oas file for module and version 63 | oas_file="codegen/apis/_build/${version}/${module}_${version}.oas.yaml" 64 | 65 | oapi-codegen --package=${module} \ 66 | --generate types,client \ 67 | "${oas_file}" > "${destination}" 68 | } 69 | 70 | generate_proto_client() { 71 | local module=$1 72 | 73 | # source proto file for module and version 74 | proto_file="codegen/apis/_build/${version}/${module}_${version}.proto" 75 | 76 | protoc --experimental_allow_proto3_optional \ 77 | --proto_path=codegen/apis/vendor/protos \ 78 | --proto_path=codegen/apis/_build/${version} \ 79 | --go_opt=module="github.com/pinecone-io/go-pinecone" \ 80 | --go-grpc_opt=module="github.com/pinecone-io/go-pinecone" \ 81 | --go_out=. \ 82 | --go-grpc_out=. 
\ 83 | "${proto_file}" 84 | } 85 | 86 | generate_version_file() { 87 | echo "Generating version file" 88 | cat > "${version_file}" <= 'a' && char <= 'z') || (char >= '0' && char <= '9') || char == '_' || char == ' ' || char == ':' { 41 | strBldr.WriteRune(char) 42 | } 43 | } 44 | userAgent = strBldr.String() 45 | 46 | // Trim left/right whitespace 47 | userAgent = strings.TrimSpace(userAgent) 48 | 49 | // Condense multiple spaces to one, and replace with underscore 50 | userAgent = strings.Join(strings.Fields(userAgent), "_") 51 | 52 | return fmt.Sprintf("; source_tag=%s;", userAgent) 53 | } 54 | -------------------------------------------------------------------------------- /internal/useragent/useragent_test.go: -------------------------------------------------------------------------------- 1 | package useragent 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "testing" 7 | ) 8 | 9 | func TestBuildUserAgentNoSourceTag(t *testing.T) { 10 | sourceTag := "" 11 | expectedStartWith := fmt.Sprintf("go-client/%s", getPackageVersion()) 12 | result := BuildUserAgent(sourceTag) 13 | if !strings.HasPrefix(result, expectedStartWith) { 14 | t.Errorf("BuildUserAgent(): expected user-agent to start with %s, but got %s", expectedStartWith, result) 15 | } 16 | if strings.Contains(result, "source_tag") { 17 | t.Errorf("BuildUserAgent(): expected user-agent to not contain 'source_tag', but got %s", result) 18 | } 19 | } 20 | 21 | func TestBuildUserAgentWithSourceTag(t *testing.T) { 22 | sourceTag := "my_source_tag" 23 | expectedStartWith := fmt.Sprintf("go-client/%s", getPackageVersion()) 24 | result := BuildUserAgent(sourceTag) 25 | if !strings.HasPrefix(result, expectedStartWith) { 26 | t.Errorf("BuildUserAgent(): expected user-agent to start with %s, but got %s", expectedStartWith, result) 27 | } 28 | if !strings.Contains(result, "source_tag=my_source_tag") { 29 | t.Errorf("BuildUserAgent(): expected user-agent to contain 'source_tag=my_source_tag', but got %s", result) 30 | } 31 | } 32 | 33 | func TestBuildUserAgentGRPCNoSourceTag(t *testing.T) { 34 | sourceTag := "" 35 | expectedStartWith := fmt.Sprintf("go-client[grpc]/%s", getPackageVersion()) 36 | result := BuildUserAgentGRPC(sourceTag) 37 | if !strings.HasPrefix(result, expectedStartWith) { 38 | t.Errorf("BuildUserAgent(): expected user-agent to start with %s, but got %s", expectedStartWith, result) 39 | } 40 | if strings.Contains(result, "source_tag") { 41 | t.Errorf("BuildUserAgent(): expected user-agent to not contain 'source_tag', but got %s", result) 42 | } 43 | } 44 | 45 | func TestBuildUserAgentGRPCWithSourceTag(t *testing.T) { 46 | sourceTag := "my_source_tag" 47 | expectedStartWith := fmt.Sprintf("go-client[grpc]/%s", getPackageVersion()) 48 | result := BuildUserAgentGRPC(sourceTag) 49 | if !strings.HasPrefix(result, expectedStartWith) { 50 | t.Errorf("BuildUserAgent(): expected user-agent to start with %s, but got %s", expectedStartWith, result) 51 | } 52 | if !strings.Contains(result, "source_tag=my_source_tag") { 53 | t.Errorf("BuildUserAgent(): expected user-agent to contain 'source_tag=my_source_tag', but got %s", result) 54 | } 55 | } 56 | 57 | func TestBuildUserAgentSourceTagIsNormalized(t *testing.T) { 58 | sourceTag := "my source tag!!!!" 
	result := BuildUserAgent(sourceTag)
	if !strings.Contains(result, "source_tag=my_source_tag") {
		t.Errorf("BuildUserAgent(\"%s\"): expected user-agent to contain 'source_tag=my_source_tag', but got %s", sourceTag, result)
	}

	sourceTag = "My Source Tag"
	result = BuildUserAgent(sourceTag)
	if !strings.Contains(result, "source_tag=my_source_tag") {
		t.Errorf("BuildUserAgent(\"%s\"): expected user-agent to contain 'source_tag=my_source_tag', but got %s", sourceTag, result)
	}

	sourceTag = " My Source Tag 123 "
	result = BuildUserAgent(sourceTag)
	if !strings.Contains(result, "source_tag=my_source_tag_123") {
		t.Errorf("BuildUserAgent(\"%s\"): expected user-agent to contain 'source_tag=my_source_tag_123', but got %s", sourceTag, result)
	}

	sourceTag = " My Source Tag 123 #### !! "
	result = BuildUserAgent(sourceTag)
	if !strings.Contains(result, "source_tag=my_source_tag_123") {
		t.Errorf("BuildUserAgent(\"%s\"): expected user-agent to contain 'source_tag=my_source_tag_123', but got %s", sourceTag, result)
	}

	sourceTag = " My Source Tag 123 : !! "
	result = BuildUserAgent(sourceTag)
	if !strings.Contains(result, "source_tag=my_source_tag_123_:") {
		t.Errorf("BuildUserAgent(\"%s\"): expected user-agent to contain 'source_tag=my_source_tag_123_:', but got %s", sourceTag, result)
	}
}

--------------------------------------------------------------------------------
/internal/utils/interceptors.go:

package utils

import (
	"context"
	"strings"
	"testing"

	"google.golang.org/grpc"
	"google.golang.org/grpc/metadata"
)

// MetadataInterceptor is a grpc.UnaryClientInterceptor that extracts the gRPC metadata
// from the outgoing RPC request context so we can assert on it
func MetadataInterceptor(t *testing.T, expectedMetadata map[string]string) grpc.UnaryClientInterceptor {
	return func(
		ctx context.Context,
		method string,
		req any,
		reply any,
		cc *grpc.ClientConn,
		invoker grpc.UnaryInvoker,
		opts ...grpc.CallOption,
	) error {
		metadata, _ := metadata.FromOutgoingContext(ctx)
		metadataString := mdToString(metadata)

		// Check that the outgoing context has the metadata we expect
		for key, value := range expectedMetadata {
			if !strings.Contains(metadataString, key) || !strings.Contains(metadataString, value) {
				t.Fatalf("MetadataInterceptor: expected to find key %s with value %s in metadata, but found %s", key, value, metadataString)
			}
		}

		return nil
	}
}

func mdToString(md metadata.MD) string {
	builder := strings.Builder{}
	for key, values := range md {
		builder.WriteString(key + ": [")
		builder.WriteString(strings.Join(values, ", "))
		builder.WriteString("]\n")
	}

	return builder.String()
}

--------------------------------------------------------------------------------
/internal/utils/mocks.go:

package utils

import (
	"bytes"
	"io"
	"net/http"
)

type MockTransport struct {
	Req  *http.Request
	Resp *http.Response
	Err  error
}

func (m *MockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	m.Req = req
	return m.Resp, m.Err
}

func CreateMockClient(jsonBody string)
*http.Client { 21 | return &http.Client{ 22 | Transport: &MockTransport{ 23 | Resp: &http.Response{ 24 | StatusCode: 200, 25 | Body: io.NopCloser(bytes.NewReader([]byte(jsonBody))), 26 | Header: make(http.Header), 27 | }, 28 | }, 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /internal/version.go: -------------------------------------------------------------------------------- 1 | package internal 2 | 3 | // Version is the current version of the Pinecone client and is set during the release process. 4 | // DO NOT EDIT 5 | var Version = "v3.1.0" -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | api_version := "2025-01" 2 | 3 | test: 4 | #!/usr/bin/env bash 5 | set -o allexport 6 | source .env 7 | set +o allexport 8 | go test -count=1 -v ./pinecone 9 | test-unit: 10 | #!/usr/bin/env bash 11 | set -o allexport 12 | source .env 13 | set +o allexport 14 | go test -v -run Unit ./pinecone 15 | bootstrap: 16 | go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.32 17 | go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.3 18 | go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@v2.3.0 19 | go install golang.org/x/tools/cmd/godoc@latest 20 | gen: 21 | ./codegen/build-clients.sh {{api_version}} 22 | docs: 23 | @echo "Serving docs at http://localhost:6060/pkg/github.com/pinecone-io/go-pinecone/v3/pinecone/" 24 | @godoc -http=:6060 >/dev/null 25 | -------------------------------------------------------------------------------- /pinecone/errors.go: -------------------------------------------------------------------------------- 1 | package pinecone 2 | 3 | import "fmt" 4 | 5 | type PineconeError struct { 6 | Code int 7 | Msg error 8 | } 9 | 10 | func (pe *PineconeError) Error() string { 11 | return fmt.Sprintf("%+v", pe.Msg) 12 | } 13 | -------------------------------------------------------------------------------- /pinecone/index_connection_test.go: -------------------------------------------------------------------------------- 1 | package pinecone 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | "fmt" 7 | "log" 8 | "testing" 9 | 10 | db_data_grpc "github.com/pinecone-io/go-pinecone/v3/internal/gen/db_data/grpc" 11 | "github.com/pinecone-io/go-pinecone/v3/internal/utils" 12 | "google.golang.org/grpc" 13 | "google.golang.org/grpc/metadata" 14 | "google.golang.org/protobuf/types/known/structpb" 15 | 16 | "github.com/stretchr/testify/assert" 17 | "github.com/stretchr/testify/require" 18 | ) 19 | 20 | // Integration tests 21 | func (ts *IntegrationTests) TestFetchVectors() { 22 | ctx := context.Background() 23 | res, err := ts.idxConn.FetchVectors(ctx, ts.vectorIds) 24 | assert.NoError(ts.T(), err) 25 | assert.NotNil(ts.T(), res) 26 | } 27 | 28 | func (ts *IntegrationTests) TestQueryByVector() { 29 | vec := make([]float32, derefOrDefault(ts.dimension, 0)) 30 | for i := range vec { 31 | vec[i] = 0.01 32 | } 33 | 34 | req := &QueryByVectorValuesRequest{ 35 | Vector: vec, 36 | TopK: 5, 37 | IncludeValues: true, 38 | IncludeMetadata: true, 39 | } 40 | 41 | retryAssertionsWithDefaults(ts.T(), func() error { 42 | ctx := context.Background() 43 | res, err := ts.idxConn.QueryByVectorValues(ctx, req) 44 | if err != nil { 45 | return fmt.Errorf("QueryByVectorValues failed: %v", err) 46 | } 47 | if res == nil { 48 | return fmt.Errorf("QueryByVectorValues response is nil") 49 | } 50 | 51 | 
assert.NoError(ts.T(), err) 52 | assert.NotNil(ts.T(), res) 53 | return nil 54 | }) 55 | } 56 | 57 | func (ts *IntegrationTests) TestQueryById() { 58 | req := &QueryByVectorIdRequest{ 59 | VectorId: ts.vectorIds[0], 60 | TopK: 5, 61 | } 62 | 63 | ctx := context.Background() 64 | res, err := ts.idxConn.QueryByVectorId(ctx, req) 65 | assert.NoError(ts.T(), err) 66 | assert.NotNil(ts.T(), res) 67 | } 68 | 69 | func (ts *IntegrationTests) TestDeleteVectorsById() { 70 | ctx := context.Background() 71 | err := ts.idxConn.DeleteVectorsById(ctx, ts.vectorIds) 72 | assert.NoError(ts.T(), err) 73 | ts.vectorIds = []string{} 74 | 75 | vectors := generateVectors(5, derefOrDefault(ts.dimension, 0), false, nil) 76 | 77 | _, err = ts.idxConn.UpsertVectors(ctx, vectors) 78 | if err != nil { 79 | log.Fatalf("Failed to upsert vectors in TestDeleteVectorsById test. Error: %v", err) 80 | } 81 | 82 | vectorIds := make([]string, len(vectors)) 83 | for i, v := range vectors { 84 | vectorIds[i] = v.Id 85 | } 86 | 87 | ts.vectorIds = append(ts.vectorIds, vectorIds...) 88 | } 89 | 90 | func (ts *IntegrationTests) TestDeleteVectorsByFilter() { 91 | metadataFilter := map[string]interface{}{ 92 | "genre": "classical", 93 | } 94 | filter, err := structpb.NewStruct(metadataFilter) 95 | if err != nil { 96 | ts.FailNow(fmt.Sprintf("Failed to create metadata filter: %v", err)) 97 | } 98 | 99 | ctx := context.Background() 100 | _ = ts.idxConn.DeleteVectorsByFilter(ctx, filter) 101 | 102 | ts.vectorIds = []string{} 103 | 104 | vectors := generateVectors(5, derefOrDefault(ts.dimension, 0), false, nil) 105 | 106 | _, err = ts.idxConn.UpsertVectors(ctx, vectors) 107 | if err != nil { 108 | log.Fatalf("Failed to upsert vectors in TestDeleteVectorsByFilter test. Error: %v", err) 109 | } 110 | 111 | vectorIds := make([]string, len(vectors)) 112 | for i, v := range vectors { 113 | vectorIds[i] = v.Id 114 | } 115 | 116 | ts.vectorIds = append(ts.vectorIds, vectorIds...) 117 | } 118 | 119 | func (ts *IntegrationTests) TestDeleteAllVectorsInNamespace() { 120 | ctx := context.Background() 121 | err := ts.idxConn.DeleteAllVectorsInNamespace(ctx) 122 | assert.NoError(ts.T(), err) 123 | ts.vectorIds = []string{} 124 | 125 | vectors := generateVectors(5, derefOrDefault(ts.dimension, 0), false, nil) 126 | 127 | _, err = ts.idxConn.UpsertVectors(ctx, vectors) 128 | if err != nil { 129 | log.Fatalf("Failed to upsert vectors in TestDeleteAllVectorsInNamespace test. Error: %v", err) 130 | } 131 | 132 | vectorIds := make([]string, len(vectors)) 133 | for i, v := range vectors { 134 | vectorIds[i] = v.Id 135 | } 136 | 137 | ts.vectorIds = append(ts.vectorIds, vectorIds...) 
138 | 139 | } 140 | 141 | func (ts *IntegrationTests) TestDescribeIndexStats() { 142 | ctx := context.Background() 143 | res, err := ts.idxConn.DescribeIndexStats(ctx) 144 | assert.NoError(ts.T(), err) 145 | assert.NotNil(ts.T(), res) 146 | } 147 | 148 | func (ts *IntegrationTests) TestDescribeIndexStatsFiltered() { 149 | ctx := context.Background() 150 | res, err := ts.idxConn.DescribeIndexStatsFiltered(ctx, &MetadataFilter{}) 151 | assert.NoError(ts.T(), err) 152 | assert.NotNil(ts.T(), res) 153 | } 154 | 155 | func (ts *IntegrationTests) TestListVectors() { 156 | ts.T().Skip() 157 | req := &ListVectorsRequest{} 158 | 159 | ctx := context.Background() 160 | res, err := ts.idxConn.ListVectors(ctx, req) 161 | assert.NoError(ts.T(), err) 162 | assert.NotNil(ts.T(), res) 163 | } 164 | 165 | func (ts *IntegrationTests) TestMetadataAppliedToRequests() { 166 | apiKey := "test-api-key" 167 | namespace := "test-namespace" 168 | sourceTag := "test-source-tag" 169 | additionalMetadata := map[string]string{"api-key": apiKey, "test-meta": "test-value"} 170 | 171 | idxConn, err := newIndexConnection(newIndexParameters{ 172 | additionalMetadata: additionalMetadata, 173 | host: ts.host, 174 | namespace: namespace, 175 | sourceTag: sourceTag, 176 | }, 177 | grpc.WithUnaryInterceptor(utils.MetadataInterceptor(ts.T(), additionalMetadata)), 178 | ) 179 | 180 | require.NoError(ts.T(), err) 181 | apiKeyHeader, ok := idxConn.additionalMetadata["api-key"] 182 | require.True(ts.T(), ok, "Expected client to have an 'api-key' header") 183 | require.Equal(ts.T(), apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) 184 | require.Equal(ts.T(), namespace, idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) 185 | require.NotNil(ts.T(), idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") 186 | require.NotNil(ts.T(), idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") 187 | 188 | // initiate request to trigger the MetadataInterceptor 189 | stats, err := idxConn.DescribeIndexStats(context.Background()) 190 | if err != nil { 191 | ts.FailNow(fmt.Sprintf("Failed to describe index stats: %v", err)) 192 | } 193 | 194 | require.NotNil(ts.T(), stats) 195 | } 196 | 197 | func (ts *IntegrationTests) TestUpdateVectorValues() { 198 | ctx := context.Background() 199 | 200 | expectedVals := []float32{7.2, 7.2, 7.2, 7.2, 7.2} 201 | err := ts.idxConn.UpdateVector(ctx, &UpdateVectorRequest{ 202 | Id: ts.vectorIds[0], 203 | Values: expectedVals, 204 | }) 205 | assert.NoError(ts.T(), err) 206 | 207 | retryAssertionsWithDefaults(ts.T(), func() error { 208 | vector, err := ts.idxConn.FetchVectors(ctx, []string{ts.vectorIds[0]}) 209 | if err != nil { 210 | return fmt.Errorf(fmt.Sprintf("Failed to fetch vector: %v", err)) 211 | } 212 | 213 | if len(vector.Vectors) > 0 { 214 | actualVals := vector.Vectors[ts.vectorIds[0]].Values 215 | if actualVals != nil { 216 | if !slicesEqual[float32](expectedVals, *actualVals) { 217 | return fmt.Errorf("Values do not match") 218 | } else { 219 | return nil // Test passed 220 | } 221 | } else { 222 | return fmt.Errorf("Values are nil after UpdateVector->FetchVector") 223 | } 224 | } else { 225 | return fmt.Errorf("No vectors found after UpdateVector->FetchVector") 226 | } 227 | }) 228 | } 229 | 230 | func (ts *IntegrationTests) TestUpdateVectorMetadata() { 231 | ctx := context.Background() 232 | 233 | expectedMetadata := map[string]interface{}{ 234 | "genre": "death-metal", 235 | } 236 | expectedMetadataMap, 
err := structpb.NewStruct(expectedMetadata) 237 | if err != nil { 238 | ts.FailNow(fmt.Sprintf("Failed to create metadata map: %v", err)) 239 | } 240 | 241 | err = ts.idxConn.UpdateVector(ctx, &UpdateVectorRequest{ 242 | Id: ts.vectorIds[0], 243 | Metadata: expectedMetadataMap, 244 | }) 245 | assert.NoError(ts.T(), err) 246 | 247 | // TODO: re-enable once serverless freshness is more stable 248 | if ts.indexType != "serverless" { 249 | retryAssertionsWithDefaults(ts.T(), func() error { 250 | vectors, err := ts.idxConn.FetchVectors(ctx, []string{ts.vectorIds[0]}) 251 | if err != nil { 252 | return fmt.Errorf("Failed to fetch vector: %v", err) 253 | } 254 | 255 | if vectors != nil && len(vectors.Vectors) > 0 { 256 | vector := vectors.Vectors[ts.vectorIds[0]] 257 | if vector == nil { 258 | return fmt.Errorf("Fetched vector is nil after UpdateVector->FetchVector") 259 | } 260 | if vector.Metadata == nil { 261 | return fmt.Errorf("Metadata is nil after update") 262 | } 263 | 264 | expectedGenre := expectedMetadataMap.Fields["genre"].GetStringValue() 265 | actualGenre := vector.Metadata.Fields["genre"].GetStringValue() 266 | 267 | if expectedGenre != actualGenre { 268 | return fmt.Errorf("Metadata does not match") 269 | } 270 | } else { 271 | return fmt.Errorf("No vectors found after update") 272 | } 273 | return nil // Test passed 274 | }) 275 | } 276 | } 277 | 278 | func (ts *IntegrationTests) TestUpdateVectorSparseValues() { 279 | ctx := context.Background() 280 | 281 | dims := int32(derefOrDefault(ts.dimension, 0)) 282 | indices := generateUint32Array(int(dims)) 283 | vals := generateFloat32Array(int(dims)) 284 | expectedSparseValues := SparseValues{ 285 | Indices: indices, 286 | Values: vals, 287 | } 288 | 289 | fmt.Printf("Updating sparse values in host \"%s\"...\n", ts.host) 290 | err := ts.idxConn.UpdateVector(ctx, &UpdateVectorRequest{ 291 | Id: ts.vectorIds[0], 292 | SparseValues: &expectedSparseValues, 293 | }) 294 | require.NoError(ts.T(), err) 295 | 296 | // TODO: re-enable once serverless freshness is more stable 297 | if ts.indexType != "serverless" { 298 | // Fetch updated vector and verify sparse values 299 | retryAssertionsWithDefaults(ts.T(), func() error { 300 | vectors, err := ts.idxConn.FetchVectors(ctx, []string{ts.vectorIds[0]}) 301 | if err != nil { 302 | return fmt.Errorf("Failed to fetch vector: %v", err) 303 | } 304 | 305 | vector := vectors.Vectors[ts.vectorIds[0]] 306 | 307 | if vector == nil { 308 | return fmt.Errorf("Fetched vector is nil after UpdateVector->FetchVector") 309 | } 310 | if vector.SparseValues == nil { 311 | return fmt.Errorf("Sparse values are nil after UpdateVector->FetchVector") 312 | } 313 | actualSparseValues := vector.SparseValues.Values 314 | 315 | if !slicesEqual[float32](expectedSparseValues.Values, actualSparseValues) { 316 | return fmt.Errorf("Sparse values do not match") 317 | } 318 | return nil // Test passed 319 | }) 320 | } 321 | } 322 | 323 | func (ts *IntegrationTests) TestImportFlowHappyPath() { 324 | if ts.indexType != "serverless" { 325 | ts.T().Skip("Skipping import flow test for non-serverless index") 326 | } 327 | 328 | testImportUri := "s3://dev-bulk-import-datasets-pub/10-records-dim-10/" 329 | ctx := context.Background() 330 | errorMode := "continue" 331 | 332 | startRes, err := ts.idxConn.StartImport(ctx, testImportUri, nil, (*ImportErrorMode)(&errorMode)) 333 | assert.NoError(ts.T(), err) 334 | assert.NotNil(ts.T(), startRes) 335 | 336 | assert.NotNil(ts.T(), startRes.Id) 337 | describeRes, err := 
ts.idxConn.DescribeImport(ctx, startRes.Id) 338 | assert.NoError(ts.T(), err) 339 | assert.NotNil(ts.T(), describeRes) 340 | assert.Equal(ts.T(), startRes.Id, describeRes.Id) 341 | 342 | limit := int32(10) 343 | listRes, err := ts.idxConn.ListImports(ctx, &limit, nil) 344 | assert.NoError(ts.T(), err) 345 | assert.NotNil(ts.T(), listRes) 346 | 347 | err = ts.idxConn.CancelImport(ctx, startRes.Id) 348 | assert.NoError(ts.T(), err) 349 | } 350 | 351 | func (ts *IntegrationTests) TestImportFlowNoUriError() { 352 | if ts.indexType != "serverless" { 353 | ts.T().Skip("Skipping import flow test for non-serverless index") 354 | } 355 | 356 | ctx := context.Background() 357 | _, err := ts.idxConn.StartImport(ctx, "", nil, nil) 358 | assert.Error(ts.T(), err) 359 | assert.Contains(ts.T(), err.Error(), "must specify a uri") 360 | } 361 | 362 | func (ts *IntegrationTests) TestIntegratedInference() { 363 | if ts.indexType != "serverless" { 364 | ts.T().Skip("Running TestIntegratedInference once") 365 | } 366 | indexName := "test-integrated-" + generateTestIndexName() 367 | 368 | // create integrated index 369 | ctx := context.Background() 370 | index, err := ts.client.CreateIndexForModel(ctx, &CreateIndexForModelRequest{ 371 | Name: indexName, 372 | Cloud: "aws", 373 | Region: "us-east-1", 374 | Embed: CreateIndexForModelEmbed{ 375 | Model: "multilingual-e5-large", 376 | FieldMap: map[string]interface{}{"text": "chunk_text"}, 377 | }, 378 | }) 379 | assert.NoError(ts.T(), err) 380 | assert.NotNil(ts.T(), index) 381 | 382 | defer func(ts *IntegrationTests, name string) { 383 | err := ts.deleteIndex(name) 384 | 385 | require.NoError(ts.T(), err) 386 | }(ts, indexName) 387 | 388 | // upsert records/documents 389 | records := []*IntegratedRecord{ 390 | { 391 | "_id": "rec1", 392 | "chunk_text": "Apple's first product, the Apple I, was released in 1976 and was hand-built by co-founder Steve Wozniak.", 393 | "category": "product", 394 | }, 395 | { 396 | "_id": "rec2", 397 | "chunk_text": "Apples are a great source of dietary fiber, which supports digestion and helps maintain a healthy gut.", 398 | "category": "nutrition", 399 | }, 400 | { 401 | "_id": "rec3", 402 | "chunk_text": "Apples originated in Central Asia and have been cultivated for thousands of years, with over 7,500 varieties available today.", 403 | "category": "cultivation", 404 | }, 405 | { 406 | "_id": "rec4", 407 | "chunk_text": "In 2001, Apple released the iPod, which transformed the music industry by making portable music widely accessible.", 408 | "category": "product", 409 | }, 410 | { 411 | "_id": "rec5", 412 | "chunk_text": "Apple went public in 1980, making history with one of the largest IPOs at that time.", 413 | "category": "milestone", 414 | }, 415 | { 416 | "_id": "rec6", 417 | "chunk_text": "Rich in vitamin C and other antioxidants, apples contribute to immune health and may reduce the risk of chronic diseases.", 418 | "category": "nutrition", 419 | }, 420 | { 421 | "_id": "rec7", 422 | "chunk_text": "Known for its design-forward products, Apple's branding and market strategy have greatly influenced the technology sector and popularized minimalist design worldwide.", 423 | "category": "influence", 424 | }, 425 | { 426 | "_id": "rec8", 427 | "chunk_text": "The high fiber content in apples can also help regulate blood sugar levels, making them a favorable snack for people with diabetes.", 428 | "category": "nutrition", 429 | }, 430 | } 431 | err = ts.idxConn.UpsertRecords(ctx, records) 432 | assert.NoError(ts.T(), err) 433 | 434 | 
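// Record shape note: with an integrated index, each record carries an "_id"
// plus the source-text field named in the index's FieldMap ("chunk_text"
// above); Pinecone embeds that field server-side, and the remaining keys
// (such as "category" here) are stored with the record and can be used for
// filtering. A minimal record mirroring the mapping configured above
// (illustrative values):
//
//	rec := &IntegratedRecord{
//		"_id":        "rec9",
//		"chunk_text": "Some text to embed",
//	}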
retryAssertionsWithDefaults(ts.T(), func() error { 435 | res, err := ts.idxConn.SearchRecords(ctx, &SearchRecordsRequest{ 436 | Query: SearchRecordsQuery{ 437 | TopK: 5, 438 | Inputs: &map[string]interface{}{ 439 | "text": "Disease prevention", 440 | }, 441 | }, 442 | }) 443 | if err != nil { 444 | return fmt.Errorf("failed to search records: %v", err) 445 | } 446 | if res == nil { 447 | return fmt.Errorf("result is nil") 448 | } 449 | return nil 450 | }) 451 | } 452 | 453 | // Unit tests: 454 | func TestUpdateVectorMissingReqdFieldsUnit(t *testing.T) { 455 | ctx := context.Background() 456 | idxConn := &IndexConnection{} 457 | err := idxConn.UpdateVector(ctx, &UpdateVectorRequest{}) 458 | assert.Error(t, err) 459 | assert.Contains(t, err.Error(), "a vector ID plus at least one of Values, SparseValues, or Metadata must be provided to update a vector") 460 | } 461 | 462 | func TestNewIndexConnection(t *testing.T) { 463 | apiKey := "test-api-key" 464 | host := "test-host.io" 465 | namespace := "" 466 | sourceTag := "" 467 | additionalMetadata := map[string]string{"api-key": apiKey} 468 | idxConn, err := newIndexConnection(newIndexParameters{ 469 | additionalMetadata: additionalMetadata, 470 | host: host, 471 | namespace: namespace, 472 | sourceTag: sourceTag}) 473 | 474 | require.NoError(t, err) 475 | apiKeyHeader, ok := idxConn.additionalMetadata["api-key"] 476 | require.True(t, ok, "Expected client to have an 'api-key' header") 477 | require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) 478 | require.Empty(t, idxConn.Namespace, "Expected idxConn to have empty namespace, but got '%s'", idxConn.Namespace) 479 | require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") 480 | require.NotNil(t, idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") 481 | } 482 | 483 | func TestNewIndexConnectionNamespace(t *testing.T) { 484 | apiKey := "test-api-key" 485 | host := "test-host.io" 486 | namespace := "test-namespace" 487 | sourceTag := "test-source-tag" 488 | additionalMetadata := map[string]string{"api-key": apiKey} 489 | idxConn, err := newIndexConnection(newIndexParameters{ 490 | additionalMetadata: additionalMetadata, 491 | host: host, 492 | namespace: namespace, 493 | sourceTag: sourceTag}) 494 | 495 | require.NoError(t, err) 496 | apiKeyHeader, ok := idxConn.additionalMetadata["api-key"] 497 | require.True(t, ok, "Expected client to have an 'api-key' header") 498 | require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) 499 | require.Equal(t, namespace, idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) 500 | require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") 501 | require.NotNil(t, idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") 502 | } 503 | 504 | func TestMarshalFetchVectorsResponseUnit(t *testing.T) { 505 | vec1Values := []float32{0.01, 0.01, 0.01} 506 | vec2Values := []float32{0.02, 0.02, 0.02} 507 | 508 | tests := []struct { 509 | name string 510 | input FetchVectorsResponse 511 | want string 512 | }{ 513 | { 514 | name: "All fields present", 515 | input: FetchVectorsResponse{ 516 | Vectors: map[string]*Vector{ 517 | "vec-1": {Id: "vec-1", Values: &vec1Values}, 518 | "vec-2": {Id: "vec-2", Values: &vec2Values}, 519 | }, 520 | Usage: &Usage{ReadUnits: 5}, 521 | Namespace: "test-namespace", 522 | }, 523 | want: 
`{"vectors":{"vec-1":{"id":"vec-1","values":[0.01,0.01,0.01]},"vec-2":{"id":"vec-2","values":[0.02,0.02,0.02]}},"usage":{"read_units":5},"namespace":"test-namespace"}`, 524 | }, 525 | { 526 | name: "Fields omitted", 527 | input: FetchVectorsResponse{}, 528 | want: `{"namespace":""}`, 529 | }, 530 | { 531 | name: "Fields empty", 532 | input: FetchVectorsResponse{ 533 | Vectors: nil, 534 | Usage: nil, 535 | Namespace: "", 536 | }, 537 | want: `{"namespace":""}`, 538 | }, 539 | } 540 | 541 | for _, tt := range tests { 542 | t.Run(tt.name, func(t *testing.T) { 543 | bytes, err := json.Marshal(tt.input) 544 | if err != nil { 545 | t.Fatalf("Failed to marshal FetchVectorsResponse: %v", err) 546 | } 547 | 548 | if got := string(bytes); got != tt.want { 549 | t.Errorf("Marshal FetchVectorsResponse got = %s, want = %s", got, tt.want) 550 | } 551 | }) 552 | } 553 | } 554 | 555 | func TestMarshalListVectorsResponseUnit(t *testing.T) { 556 | vectorId1 := "vec-1" 557 | vectorId2 := "vec-2" 558 | paginationToken := "next-token" 559 | tests := []struct { 560 | name string 561 | input ListVectorsResponse 562 | want string 563 | }{ 564 | { 565 | name: "All fields present", 566 | input: ListVectorsResponse{ 567 | VectorIds: []*string{&vectorId1, &vectorId2}, 568 | Usage: &Usage{ReadUnits: 5}, 569 | NextPaginationToken: &paginationToken, 570 | Namespace: "test-namespace", 571 | }, 572 | want: `{"vector_ids":["vec-1","vec-2"],"usage":{"read_units":5},"next_pagination_token":"next-token","namespace":"test-namespace"}`, 573 | }, 574 | { 575 | name: "Fields omitted", 576 | input: ListVectorsResponse{}, 577 | want: `{"namespace":""}`, 578 | }, 579 | { 580 | name: "Fields empty", 581 | input: ListVectorsResponse{ 582 | VectorIds: nil, 583 | Usage: nil, 584 | NextPaginationToken: nil, 585 | Namespace: "", 586 | }, 587 | want: `{"namespace":""}`, 588 | }, 589 | } 590 | 591 | for _, tt := range tests { 592 | t.Run(tt.name, func(t *testing.T) { 593 | bytes, err := json.Marshal(tt.input) 594 | if err != nil { 595 | t.Fatalf("Failed to marshal ListVectorsResponse: %v", err) 596 | } 597 | 598 | if got := string(bytes); got != tt.want { 599 | t.Errorf("Marshal ListVectorsResponse got = %s, want = %s", got, tt.want) 600 | } 601 | }) 602 | } 603 | } 604 | 605 | func TestMarshalQueryVectorsResponseUnit(t *testing.T) { 606 | vec1Values := []float32{0.01, 0.01, 0.01} 607 | vec2Values := []float32{0.02, 0.02, 0.02} 608 | tests := []struct { 609 | name string 610 | input QueryVectorsResponse 611 | want string 612 | }{ 613 | { 614 | name: "All fields present", 615 | input: QueryVectorsResponse{ 616 | Matches: []*ScoredVector{ 617 | {Vector: &Vector{Id: "vec-1", Values: &vec1Values}, Score: 0.1}, 618 | {Vector: &Vector{Id: "vec-2", Values: &vec2Values}, Score: 0.2}, 619 | }, 620 | Usage: &Usage{ReadUnits: 5}, 621 | Namespace: "test-namespace", 622 | }, 623 | want: `{"matches":[{"vector":{"id":"vec-1","values":[0.01,0.01,0.01]},"score":0.1},{"vector":{"id":"vec-2","values":[0.02,0.02,0.02]},"score":0.2}],"usage":{"read_units":5},"namespace":"test-namespace"}`, 624 | }, 625 | { 626 | name: "Fields omitted", 627 | input: QueryVectorsResponse{}, 628 | want: `{"namespace":""}`, 629 | }, 630 | { 631 | name: "Fields empty", 632 | input: QueryVectorsResponse{Matches: nil, Usage: nil}, 633 | want: `{"namespace":""}`, 634 | }, 635 | } 636 | 637 | for _, tt := range tests { 638 | t.Run(tt.name, func(t *testing.T) { 639 | bytes, err := json.Marshal(tt.input) 640 | if err != nil { 641 | t.Fatalf("Failed to marshal QueryVectorsResponse: %v", 
err) 642 | } 643 | 644 | if got := string(bytes); got != tt.want { 645 | t.Errorf("Marshal QueryVectorsResponse got = %s, want = %s", got, tt.want) 646 | } 647 | }) 648 | } 649 | } 650 | 651 | func TestMarshalDescribeIndexStatsResponseUnit(t *testing.T) { 652 | tests := []struct { 653 | name string 654 | input DescribeIndexStatsResponse 655 | want string 656 | }{ 657 | { 658 | name: "All fields present", 659 | input: DescribeIndexStatsResponse{ 660 | Dimension: uint32Pointer(3), 661 | IndexFullness: 0.5, 662 | TotalVectorCount: 100, 663 | Namespaces: map[string]*NamespaceSummary{ 664 | "namespace-1": {VectorCount: 50}, 665 | }, 666 | }, 667 | want: `{"dimension":3,"index_fullness":0.5,"total_vector_count":100,"namespaces":{"namespace-1":{"vector_count":50}}}`, 668 | }, 669 | { 670 | name: "Fields omitted", 671 | input: DescribeIndexStatsResponse{}, 672 | want: `{"dimension":null,"index_fullness":0,"total_vector_count":0}`, 673 | }, 674 | { 675 | name: "Fields empty", 676 | input: DescribeIndexStatsResponse{ 677 | Dimension: uint32Pointer(0), 678 | IndexFullness: 0, 679 | TotalVectorCount: 0, 680 | Namespaces: nil, 681 | }, 682 | want: `{"dimension":0,"index_fullness":0,"total_vector_count":0}`, 683 | }, 684 | } 685 | 686 | for _, tt := range tests { 687 | t.Run(tt.name, func(t *testing.T) { 688 | bytes, err := json.Marshal(tt.input) 689 | if err != nil { 690 | t.Fatalf("Failed to marshal DescribeIndexStatsResponse: %v", err) 691 | } 692 | 693 | if got := string(bytes); got != tt.want { 694 | t.Errorf("Marshal DescribeIndexStatsResponse got = %s, want = %s", got, tt.want) 695 | } 696 | }) 697 | } 698 | } 699 | 700 | func TestToVectorUnit(t *testing.T) { 701 | vecValues := []float32{0.01, 0.02, 0.03} 702 | 703 | tests := []struct { 704 | name string 705 | vector *db_data_grpc.Vector 706 | expected *Vector 707 | }{ 708 | { 709 | name: "Pass nil vector, expect nil to be returned", 710 | vector: nil, 711 | expected: nil, 712 | }, 713 | { 714 | name: "Pass dense vector", 715 | vector: &db_data_grpc.Vector{ 716 | Id: "dense-1", 717 | Values: []float32{0.01, 0.02, 0.03}, 718 | }, 719 | expected: &Vector{ 720 | Id: "dense-1", 721 | Values: &vecValues, 722 | }, 723 | }, 724 | { 725 | name: "Pass sparse vector", 726 | vector: &db_data_grpc.Vector{ 727 | Id: "sparse-1", 728 | Values: nil, 729 | SparseValues: &db_data_grpc.SparseValues{ 730 | Indices: []uint32{0, 2}, 731 | Values: []float32{0.01, 0.03}, 732 | }, 733 | }, 734 | expected: &Vector{ 735 | Id: "sparse-1", 736 | Values: nil, 737 | SparseValues: &SparseValues{ 738 | Indices: []uint32{0, 2}, 739 | Values: []float32{0.01, 0.03}, 740 | }, 741 | }, 742 | }, 743 | { 744 | name: "Pass hybrid vector", 745 | vector: &db_data_grpc.Vector{ 746 | Id: "hybrid-1", 747 | Values: []float32{0.01, 0.02, 0.03}, 748 | SparseValues: &db_data_grpc.SparseValues{ 749 | Indices: []uint32{0, 2}, 750 | Values: []float32{0.01, 0.03}, 751 | }, 752 | }, 753 | 754 | expected: &Vector{ 755 | Id: "hybrid-1", 756 | Values: &vecValues, 757 | SparseValues: &SparseValues{ 758 | Indices: []uint32{0, 2}, 759 | Values: []float32{0.01, 0.03}, 760 | }, 761 | }, 762 | }, 763 | { 764 | name: "Pass hybrid vector with metadata", 765 | vector: &db_data_grpc.Vector{ 766 | Id: "hybrid-metadata-1", 767 | Values: []float32{0.01, 0.02, 0.03}, 768 | SparseValues: &db_data_grpc.SparseValues{ 769 | Indices: []uint32{0, 2}, 770 | Values: []float32{0.01, 0.03}, 771 | }, 772 | Metadata: &structpb.Struct{ 773 | Fields: map[string]*structpb.Value{ 774 | "genre": {Kind: 
&structpb.Value_StringValue{StringValue: "classical"}}, 775 | }}, 776 | }, 777 | expected: &Vector{ 778 | Id: "hybrid-metadata-1", 779 | Values: &vecValues, 780 | SparseValues: &SparseValues{ 781 | Indices: []uint32{0, 2}, 782 | Values: []float32{0.01, 0.03}, 783 | }, 784 | Metadata: &structpb.Struct{ 785 | Fields: map[string]*structpb.Value{ 786 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 787 | }}, 788 | }, 789 | }, 790 | } 791 | 792 | for _, tt := range tests { 793 | t.Run(tt.name, func(t *testing.T) { 794 | result := toVector(tt.vector) 795 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 796 | }) 797 | } 798 | } 799 | 800 | func TestToSparseValuesUnit(t *testing.T) { 801 | tests := []struct { 802 | name string 803 | sparseValues *db_data_grpc.SparseValues 804 | expected *SparseValues 805 | }{ 806 | { 807 | name: "Pass nil sparse values, expect nil to be returned", 808 | sparseValues: nil, 809 | expected: nil, 810 | }, 811 | { 812 | name: "Pass sparse values", 813 | sparseValues: &db_data_grpc.SparseValues{ 814 | Indices: []uint32{0, 2}, 815 | Values: []float32{0.01, 0.03}, 816 | }, 817 | expected: &SparseValues{ 818 | Indices: []uint32{0, 2}, 819 | Values: []float32{0.01, 0.03}, 820 | }, 821 | }, 822 | } 823 | for _, tt := range tests { 824 | t.Run(tt.name, func(t *testing.T) { 825 | result := toSparseValues(tt.sparseValues) 826 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 827 | }) 828 | } 829 | } 830 | 831 | func TestToScoredVectorUnit(t *testing.T) { 832 | vecValues := []float32{0.01, 0.02, 0.03} 833 | 834 | tests := []struct { 835 | name string 836 | scoredVector *db_data_grpc.ScoredVector 837 | expected *ScoredVector 838 | }{ 839 | { 840 | name: "Pass nil scored vector, expect nil to be returned", 841 | scoredVector: nil, 842 | expected: nil, 843 | }, 844 | { 845 | name: "Pass scored dense vector", 846 | scoredVector: &db_data_grpc.ScoredVector{ 847 | Id: "dense-1", 848 | Values: []float32{0.01, 0.02, 0.03}, 849 | Score: 0.1, 850 | }, 851 | expected: &ScoredVector{ 852 | Vector: &Vector{ 853 | Id: "dense-1", 854 | Values: &vecValues, 855 | }, 856 | Score: 0.1, 857 | }, 858 | }, 859 | { 860 | name: "Pass scored sparse vector", 861 | scoredVector: &db_data_grpc.ScoredVector{ 862 | Id: "sparse-1", 863 | SparseValues: &db_data_grpc.SparseValues{ 864 | Indices: []uint32{0, 2}, 865 | Values: []float32{0.01, 0.03}, 866 | }, 867 | Score: 0.2, 868 | }, 869 | expected: &ScoredVector{ 870 | Vector: &Vector{ 871 | Id: "sparse-1", 872 | SparseValues: &SparseValues{ 873 | Indices: []uint32{0, 2}, 874 | Values: []float32{0.01, 0.03}, 875 | }, 876 | }, 877 | Score: 0.2, 878 | }, 879 | }, 880 | { 881 | name: "Pass scored hybrid vector", 882 | scoredVector: &db_data_grpc.ScoredVector{ 883 | Id: "hybrid-1", 884 | Values: []float32{0.01, 0.02, 0.03}, 885 | SparseValues: &db_data_grpc.SparseValues{ 886 | Indices: []uint32{0, 2}, 887 | Values: []float32{0.01, 0.03}, 888 | }, 889 | Score: 0.3, 890 | }, 891 | expected: &ScoredVector{ 892 | Vector: &Vector{ 893 | Id: "hybrid-1", 894 | Values: &vecValues, 895 | SparseValues: &SparseValues{ 896 | Indices: []uint32{0, 2}, 897 | Values: []float32{0.01, 0.03}, 898 | }, 899 | }, 900 | Score: 0.3, 901 | }, 902 | }, 903 | { 904 | name: "Pass scored hybrid vector with metadata", 905 | scoredVector: &db_data_grpc.ScoredVector{ 906 | Id: "hybrid-metadata-1", 907 | Values: []float32{0.01, 0.02, 0.03}, 908 | SparseValues: 
&db_data_grpc.SparseValues{ 909 | Indices: []uint32{0, 2}, 910 | Values: []float32{0.01, 0.03}, 911 | }, 912 | Metadata: &structpb.Struct{ 913 | Fields: map[string]*structpb.Value{ 914 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 915 | }, 916 | }, 917 | Score: 0.4, 918 | }, 919 | expected: &ScoredVector{ 920 | Vector: &Vector{ 921 | Id: "hybrid-metadata-1", 922 | Values: &vecValues, 923 | SparseValues: &SparseValues{ 924 | Indices: []uint32{0, 2}, 925 | Values: []float32{0.01, 0.03}, 926 | }, 927 | Metadata: &structpb.Struct{ 928 | Fields: map[string]*structpb.Value{ 929 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 930 | }, 931 | }, 932 | }, 933 | Score: 0.4, 934 | }, 935 | }, 936 | } 937 | for _, tt := range tests { 938 | t.Run(tt.name, func(t *testing.T) { 939 | result := toScoredVector(tt.scoredVector) 940 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 941 | }) 942 | } 943 | } 944 | 945 | func TestVecToGrpcUnit(t *testing.T) { 946 | vecValues := []float32{0.01, 0.02, 0.03} 947 | 948 | tests := []struct { 949 | name string 950 | vector *Vector 951 | expected *db_data_grpc.Vector 952 | }{ 953 | { 954 | name: "Pass nil vector, expect nil to be returned", 955 | vector: nil, 956 | expected: nil, 957 | }, 958 | { 959 | name: "Pass dense vector", 960 | vector: &Vector{ 961 | Id: "dense-1", 962 | Values: &vecValues, 963 | }, 964 | expected: &db_data_grpc.Vector{ 965 | Id: "dense-1", 966 | Values: []float32{0.01, 0.02, 0.03}, 967 | }, 968 | }, 969 | { 970 | name: "Pass sparse vector", 971 | vector: &Vector{ 972 | Id: "sparse-1", 973 | Values: nil, 974 | SparseValues: &SparseValues{ 975 | Indices: []uint32{0, 2}, 976 | Values: []float32{0.01, 0.03}, 977 | }, 978 | }, 979 | expected: &db_data_grpc.Vector{ 980 | Id: "sparse-1", 981 | SparseValues: &db_data_grpc.SparseValues{ 982 | Indices: []uint32{0, 2}, 983 | Values: []float32{0.01, 0.03}, 984 | }, 985 | }, 986 | }, 987 | { 988 | name: "Pass hybrid vector", 989 | vector: &Vector{ 990 | Id: "hybrid-1", 991 | Values: &vecValues, 992 | SparseValues: &SparseValues{ 993 | Indices: []uint32{0, 2}, 994 | Values: []float32{0.01, 0.03}, 995 | }, 996 | }, 997 | expected: &db_data_grpc.Vector{ 998 | Id: "hybrid-1", 999 | Values: []float32{0.01, 0.02, 0.03}, 1000 | SparseValues: &db_data_grpc.SparseValues{ 1001 | Indices: []uint32{0, 2}, 1002 | Values: []float32{0.01, 0.03}, 1003 | }, 1004 | }, 1005 | }, 1006 | { 1007 | name: "Pass hybrid vector with metadata", 1008 | vector: &Vector{ 1009 | Id: "hybrid-metadata-1", 1010 | Values: &vecValues, 1011 | SparseValues: &SparseValues{ 1012 | Indices: []uint32{0, 2}, 1013 | Values: []float32{0.01, 0.03}, 1014 | }, 1015 | Metadata: &structpb.Struct{ 1016 | Fields: map[string]*structpb.Value{ 1017 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 1018 | }, 1019 | }, 1020 | }, 1021 | expected: &db_data_grpc.Vector{ 1022 | Id: "hybrid-metadata-1", 1023 | Values: []float32{0.01, 0.02, 0.03}, 1024 | SparseValues: &db_data_grpc.SparseValues{ 1025 | Indices: []uint32{0, 2}, 1026 | Values: []float32{0.01, 0.03}, 1027 | }, 1028 | Metadata: &structpb.Struct{ 1029 | Fields: map[string]*structpb.Value{ 1030 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 1031 | }, 1032 | }, 1033 | }, 1034 | }, 1035 | } 1036 | 1037 | for _, tt := range tests { 1038 | t.Run(tt.name, func(t *testing.T) { 1039 | result := vecToGrpc(tt.vector) 1040 | assert.Equal(t, tt.expected, result, 
"Expected result to be '%s', but got '%s'", tt.expected, result) 1041 | }) 1042 | } 1043 | } 1044 | 1045 | func TestSparseValToGrpcUnit(t *testing.T) { 1046 | tests := []struct { 1047 | name string 1048 | sparseValues *SparseValues 1049 | metadata *structpb.Struct 1050 | expected *db_data_grpc.SparseValues 1051 | }{ 1052 | { 1053 | name: "Pass nil sparse values, expect nil to be returned", 1054 | sparseValues: nil, 1055 | expected: nil, 1056 | }, 1057 | { 1058 | name: "Pass sparse values", 1059 | sparseValues: &SparseValues{ 1060 | Indices: []uint32{0, 2}, 1061 | Values: []float32{0.01, 0.03}, 1062 | }, 1063 | expected: &db_data_grpc.SparseValues{ 1064 | Indices: []uint32{0, 2}, 1065 | Values: []float32{0.01, 0.03}, 1066 | }, 1067 | }, 1068 | { 1069 | name: "Pass sparse values with metadata (metadata is ignored)", 1070 | sparseValues: &SparseValues{ 1071 | Indices: []uint32{0, 2}, 1072 | Values: []float32{0.01, 0.03}, 1073 | }, 1074 | metadata: &structpb.Struct{ 1075 | Fields: map[string]*structpb.Value{ 1076 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 1077 | }, 1078 | }, 1079 | expected: &db_data_grpc.SparseValues{ 1080 | Indices: []uint32{0, 2}, 1081 | Values: []float32{0.01, 0.03}, 1082 | }, 1083 | }, 1084 | } 1085 | for _, tt := range tests { 1086 | t.Run(tt.name, func(t *testing.T) { 1087 | result := sparseValToGrpc(tt.sparseValues) 1088 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 1089 | }) 1090 | } 1091 | } 1092 | 1093 | func TestAkCtxUnit(t *testing.T) { 1094 | tests := []struct { 1095 | name string 1096 | additionalMetadata map[string]string 1097 | initialMetadata map[string]string 1098 | expectedMetadata map[string]string 1099 | }{ 1100 | { 1101 | name: "No additional metadata in IndexConnection obj", 1102 | additionalMetadata: nil, 1103 | initialMetadata: map[string]string{"initial-key": "initial-value"}, 1104 | expectedMetadata: map[string]string{"initial-key": "initial-value"}, 1105 | }, 1106 | { 1107 | name: "With additional metadata in IndexConnection obj", 1108 | additionalMetadata: map[string]string{"addtl-key1": "addtl-value1", "addtl-key2": "addtl-value2"}, 1109 | initialMetadata: map[string]string{"initial-key": "initial-value"}, 1110 | expectedMetadata: map[string]string{ 1111 | "initial-key": "initial-value", 1112 | "addtl-key1": "addtl-value1", 1113 | "addtl-key2": "addtl-value2", 1114 | }, 1115 | }, 1116 | { 1117 | name: "Only additional metadata", 1118 | additionalMetadata: map[string]string{ 1119 | "addtl-key1": "addtl-value1", 1120 | "addtl-key2": "addtl-value2", 1121 | }, 1122 | initialMetadata: nil, 1123 | expectedMetadata: map[string]string{ 1124 | "addtl-key1": "addtl-value1", 1125 | "addtl-key2": "addtl-value2", 1126 | }, 1127 | }, 1128 | } 1129 | 1130 | for _, tt := range tests { 1131 | t.Run(tt.name, func(t *testing.T) { 1132 | idx := &IndexConnection{additionalMetadata: tt.additionalMetadata} 1133 | ctx := context.Background() 1134 | 1135 | // Add initial metadata to the context if provided 1136 | if tt.initialMetadata != nil { 1137 | md := metadata.New(tt.initialMetadata) 1138 | ctx = metadata.NewOutgoingContext(ctx, md) 1139 | } 1140 | 1141 | // Call the method 1142 | newCtx := idx.akCtx(ctx) 1143 | 1144 | // Retrieve metadata from the new context 1145 | md, ok := metadata.FromOutgoingContext(newCtx) 1146 | assert.True(t, ok) 1147 | 1148 | // Check that the metadata matches the expected metadata 1149 | for key, expectedValue := range tt.expectedMetadata { 1150 | 
values := md[key] 1151 | assert.Contains(t, values, expectedValue) 1152 | } 1153 | }) 1154 | } 1155 | } 1156 | 1157 | func TestToUsageUnit(t *testing.T) { 1158 | u5 := uint32(5) 1159 | 1160 | tests := []struct { 1161 | name string 1162 | usage *db_data_grpc.Usage 1163 | expected *Usage 1164 | }{ 1165 | { 1166 | name: "Pass nil usage, expect nil to be returned", 1167 | usage: nil, 1168 | expected: nil, 1169 | }, 1170 | { 1171 | name: "Pass usage", 1172 | usage: &db_data_grpc.Usage{ 1173 | ReadUnits: &u5, 1174 | }, 1175 | expected: &Usage{ 1176 | ReadUnits: 5, 1177 | }, 1178 | }, 1179 | } 1180 | 1181 | for _, tt := range tests { 1182 | t.Run(tt.name, func(t *testing.T) { 1183 | result := toUsage(tt.usage) 1184 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 1185 | }) 1186 | } 1187 | } 1188 | 1189 | func TestNormalizeHostUnit(t *testing.T) { 1190 | tests := []struct { 1191 | name string 1192 | host string 1193 | expectedHost string 1194 | expectedIsSecure bool 1195 | }{ 1196 | { 1197 | name: "https:// scheme should be removed", 1198 | host: "https://this-is-my-host.io", 1199 | expectedHost: "this-is-my-host.io", 1200 | expectedIsSecure: true, 1201 | }, { 1202 | name: "https:// scheme should be removed and port preserved", 1203 | host: "https://this-is-my-host.io:33445", 1204 | expectedHost: "this-is-my-host.io:33445", 1205 | expectedIsSecure: true, 1206 | }, 1207 | } 1208 | 1209 | for _, tt := range tests { 1210 | t.Run(tt.name, func(t *testing.T) { 1211 | result, isSecure := normalizeHost(tt.host) 1212 | assert.Equal(t, tt.expectedHost, result, "Expected result to be '%s', but got '%s'", tt.expectedHost, result) 1213 | assert.Equal(t, tt.expectedIsSecure, isSecure, "Expected isSecure to be '%t', but got '%t'", tt.expectedIsSecure, isSecure) 1214 | }) 1215 | } 1216 | } 1217 | 1218 | func TestToPaginationTokenGrpc(t *testing.T) { 1219 | tokenForNilCase := "" 1220 | tokenForPositiveCase := "next-token" 1221 | 1222 | tests := []struct { 1223 | name string 1224 | token *db_data_grpc.Pagination 1225 | expected *string 1226 | }{ 1227 | { 1228 | name: "Pass empty token, expect empty string to be returned", 1229 | token: &db_data_grpc.Pagination{}, 1230 | expected: &tokenForNilCase, 1231 | }, 1232 | { 1233 | name: "Pass token", 1234 | token: &db_data_grpc.Pagination{ 1235 | Next: "next-token", 1236 | }, 1237 | expected: &tokenForPositiveCase, 1238 | }, 1239 | } 1240 | 1241 | for _, tt := range tests { 1242 | t.Run(tt.name, func(t *testing.T) { 1243 | result := toPaginationTokenGrpc(tt.token) 1244 | assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) 1245 | }) 1246 | } 1247 | } 1248 | 1249 | // Helper funcs 1250 | func generateFloat32Array(n int) []float32 { 1251 | array := make([]float32, n) 1252 | for i := 0; i < n; i++ { 1253 | array[i] = float32(i) 1254 | } 1255 | return array 1256 | } 1257 | 1258 | func generateUint32Array(n int) []uint32 { 1259 | array := make([]uint32, n) 1260 | for i := 0; i < n; i++ { 1261 | array[i] = uint32(i) 1262 | } 1263 | return array 1264 | } 1265 | 1266 | func uint32Pointer(i uint32) *uint32 { 1267 | return &i 1268 | } 1269 | 1270 | func slicesEqual[T comparable](a, b []T) bool { 1271 | if len(a) != len(b) { 1272 | return false 1273 | } 1274 | 1275 | for i := range a { 1276 | if a[i] != b[i] { 1277 | return false 1278 | } 1279 | } 1280 | return true 1281 | } 1282 | -------------------------------------------------------------------------------- /pinecone/local_test.go: 
-------------------------------------------------------------------------------- 1 | //go:build localServer 2 | 3 | package pinecone 4 | 5 | import ( 6 | "context" 7 | "fmt" 8 | "os" 9 | "strconv" 10 | "testing" 11 | 12 | "github.com/stretchr/testify/assert" 13 | "github.com/stretchr/testify/require" 14 | "github.com/stretchr/testify/suite" 15 | "google.golang.org/grpc" 16 | "google.golang.org/grpc/credentials/insecure" 17 | "google.golang.org/protobuf/encoding/protojson" 18 | "google.golang.org/protobuf/types/known/structpb" 19 | ) 20 | 21 | type LocalIntegrationTests struct { 22 | suite.Suite 23 | client *Client 24 | host string 25 | dimension int32 26 | indexType string 27 | namespace string 28 | metadata *Metadata 29 | vectorIds []string 30 | idxConns []*IndexConnection 31 | } 32 | 33 | func (ts *LocalIntegrationTests) SetupSuite() { 34 | ctx := context.Background() 35 | 36 | // Deterministically create vectors 37 | vectors := generateVectors(100, ts.dimension, false, ts.metadata) 38 | 39 | // Get vector ids for the suite 40 | vectorIds := make([]string, len(vectors)) 41 | for i, v := range vectors { 42 | vectorIds[i] = v.Id 43 | } 44 | 45 | // Upsert vectors into each index connection 46 | for _, idxConn := range ts.idxConns { 47 | upsertedVectors, err := idxConn.UpsertVectors(ctx, vectors) 48 | require.NoError(ts.T(), err) 49 | fmt.Printf("Upserted vectors: %v into host: %s in namespace: %s \n", upsertedVectors, ts.host, idxConn.Namespace) 50 | } 51 | 52 | ts.vectorIds = append(ts.vectorIds, vectorIds...) 53 | } 54 | 55 | func (ts *LocalIntegrationTests) TearDownSuite() { 56 | // test deleting vectors as a part of cleanup for each index connection 57 | for _, idxConn := range ts.idxConns { 58 | // Delete a slice of vectors by id 59 | err := idxConn.DeleteVectorsById(context.Background(), ts.vectorIds[10:20]) 60 | require.NoError(ts.T(), err) 61 | 62 | // Delete vectors by filter 63 | if ts.indexType == "pods" { 64 | err = idxConn.DeleteVectorsByFilter(context.Background(), ts.metadata) 65 | require.NoError(ts.T(), err) 66 | } 67 | 68 | // Delete all remaining vectors 69 | err = idxConn.DeleteAllVectorsInNamespace(context.Background()) 70 | require.NoError(ts.T(), err) 71 | } 72 | 73 | description, err := ts.idxConns[0].DescribeIndexStats(context.Background()) 74 | require.NoError(ts.T(), err) 75 | assert.NotNil(ts.T(), description, "Index description should not be nil") 76 | assert.Equal(ts.T(), uint32(0), description.TotalVectorCount, "Total vector count should be 0 after deleting") 77 | } 78 | 79 | // This is the entry point for all local integration tests 80 | // This test function is picked up by go test and triggers the suite runs when 81 | // the build tag localServer is set 82 | func TestRunLocalIntegrationSuite(t *testing.T) { 83 | fmt.Println("Running local integration tests") 84 | RunLocalSuite(t) 85 | } 86 | 87 | func RunLocalSuite(t *testing.T) { 88 | fmt.Println("Running local integration tests") 89 | localHostPod, present := os.LookupEnv("PINECONE_INDEX_URL_POD") 90 | assert.True(t, present, "PINECONE_INDEX_URL_POD env variable not set") 91 | 92 | localHostServerless, present := os.LookupEnv("PINECONE_INDEX_URL_SERVERLESS") 93 | assert.True(t, present, "PINECONE_INDEX_URL_SERVERLESS env variable not set") 94 | 95 | dimension, present := os.LookupEnv("PINECONE_DIMENSION") 96 | assert.True(t, present, "PINECONE_DIMENSION env variable not set") 97 | 98 | parsedDimension, err := strconv.ParseInt(dimension, 10, 32) 99 | require.NoError(t, err) 100 | 101 | namespace := 
"test-namespace" 102 | metadata := &structpb.Struct{ 103 | Fields: map[string]*structpb.Value{ 104 | "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, 105 | }, 106 | } 107 | 108 | client, err := NewClientBase(NewClientBaseParams{}) 109 | require.NotNil(t, client, "Client should not be nil after creation") 110 | require.NoError(t, err) 111 | 112 | // Create index connections for pod and serverless indexes with both default namespace 113 | // and a custom namespace 114 | var podIdxConns []*IndexConnection 115 | idxConnPod, err := client.Index(NewIndexConnParams{Host: localHostPod}) 116 | require.NoError(t, err) 117 | podIdxConns = append(podIdxConns, idxConnPod) 118 | 119 | idxConnPodNamespace, err := client.Index(NewIndexConnParams{Host: localHostPod, Namespace: namespace}) 120 | require.NoError(t, err) 121 | podIdxConns = append(podIdxConns, idxConnPodNamespace) 122 | 123 | var serverlessIdxConns []*IndexConnection 124 | idxConnServerless, err := client.Index(NewIndexConnParams{Host: localHostServerless}, 125 | grpc.WithTransportCredentials(insecure.NewCredentials())) 126 | require.NoError(t, err) 127 | serverlessIdxConns = append(serverlessIdxConns, idxConnServerless) 128 | 129 | idxConnServerless, err = client.Index(NewIndexConnParams{Host: localHostServerless, Namespace: namespace}) 130 | require.NoError(t, err) 131 | serverlessIdxConns = append(serverlessIdxConns, idxConnServerless) 132 | 133 | localHostPodSuite := &LocalIntegrationTests{ 134 | client: client, 135 | idxConns: podIdxConns, 136 | indexType: "pods", 137 | host: localHostPod, 138 | namespace: namespace, 139 | metadata: metadata, 140 | dimension: int32(parsedDimension), 141 | } 142 | 143 | localHostSuiteServerless := &LocalIntegrationTests{ 144 | client: client, 145 | idxConns: serverlessIdxConns, 146 | indexType: "serverless", 147 | host: localHostServerless, 148 | namespace: namespace, 149 | metadata: metadata, 150 | dimension: int32(parsedDimension), 151 | } 152 | 153 | suite.Run(t, localHostPodSuite) 154 | suite.Run(t, localHostSuiteServerless) 155 | } 156 | 157 | func (ts *LocalIntegrationTests) TestFetchVectors() { 158 | fetchVectorId := ts.vectorIds[0] 159 | 160 | for _, idxConn := range ts.idxConns { 161 | fetchVectorsResponse, err := idxConn.FetchVectors(context.Background(), []string{fetchVectorId}) 162 | require.NoError(ts.T(), err) 163 | 164 | assert.NotNil(ts.T(), fetchVectorsResponse, "Fetch vectors response should not be nil") 165 | assert.Equal(ts.T(), 1, len(fetchVectorsResponse.Vectors), "Fetch vectors response should have 1 vector") 166 | assert.Equal(ts.T(), fetchVectorId, fetchVectorsResponse.Vectors[fetchVectorId].Id, "Fetched vector id should match") 167 | } 168 | } 169 | 170 | func (ts *LocalIntegrationTests) TestQueryVectors() { 171 | queryVectorId := ts.vectorIds[0] 172 | topK := 10 173 | 174 | for _, idxConn := range ts.idxConns { 175 | queryVectorsByIdResponse, err := idxConn.QueryByVectorId(context.Background(), &QueryByVectorIdRequest{ 176 | VectorId: queryVectorId, 177 | TopK: uint32(topK), 178 | IncludeValues: true, 179 | IncludeMetadata: true, 180 | }) 181 | require.NoError(ts.T(), err) 182 | 183 | assert.NotNil(ts.T(), queryVectorsByIdResponse, "QueryByVectorId results should not be nil") 184 | assert.Equal(ts.T(), topK, len(queryVectorsByIdResponse.Matches), "QueryByVectorId results should have 10 matches") 185 | assert.Equal(ts.T(), queryVectorId, queryVectorsByIdResponse.Matches[0].Vector.Id, "Top QueryByVectorId result's vector id should match queryVectorId") 186 | 
187 | queryByVectorValuesResponse, err := idxConn.QueryByVectorValues(context.Background(), &QueryByVectorValuesRequest{ 188 | Vector: *queryVectorsByIdResponse.Matches[0].Vector.Values, 189 | TopK: uint32(topK), 190 | MetadataFilter: ts.metadata, 191 | IncludeValues: true, 192 | IncludeMetadata: true, 193 | }) 194 | require.NoError(ts.T(), err) 195 | 196 | assert.NotNil(ts.T(), queryByVectorValuesResponse, "QueryByVectorValues results should not be nil") 197 | assert.Equal(ts.T(), topK, len(queryByVectorValuesResponse.Matches), "QueryByVectorValues results should have 10 matches") 198 | 199 | resultMetadata, err := protojson.Marshal(queryByVectorValuesResponse.Matches[0].Vector.Metadata) 200 | assert.NoError(ts.T(), err) 201 | suiteMetadata, err := protojson.Marshal(ts.metadata) 202 | assert.NoError(ts.T(), err) 203 | 204 | assert.Equal(ts.T(), resultMetadata, suiteMetadata, "Top QueryByVectorValues result's metadata should match the test suite's metadata") 205 | } 206 | } 207 | 208 | func (ts *LocalIntegrationTests) TestUpdateVectors() { 209 | updateVectorId := ts.vectorIds[0] 210 | newValues := generateVectorValues(ts.dimension) 211 | 212 | for _, idxConn := range ts.idxConns { 213 | err := idxConn.UpdateVector(context.Background(), &UpdateVectorRequest{Id: updateVectorId, Values: *newValues}) 214 | require.NoError(ts.T(), err) 215 | 216 | fetchVectorsResponse, err := idxConn.FetchVectors(context.Background(), []string{updateVectorId}) 217 | require.NoError(ts.T(), err) 218 | assert.Equal(ts.T(), newValues, fetchVectorsResponse.Vectors[updateVectorId].Values, "Updated vector values should match") 219 | } 220 | } 221 | 222 | func (ts *LocalIntegrationTests) TestDescribeIndexStats() { 223 | for _, idxConn := range ts.idxConns { 224 | description, err := idxConn.DescribeIndexStats(context.Background()) 225 | require.NoError(ts.T(), err) 226 | 227 | assert.NotNil(ts.T(), description, "Index description should not be nil") 228 | assert.Equal(ts.T(), uint32(len(ts.vectorIds)*2), description.TotalVectorCount, "Total vector count should equal the vectors upserted across both namespaces") 229 | } 230 | } 231 | 232 | func (ts *LocalIntegrationTests) TestListVectorIds() { 233 | limit := uint32(25) 234 | // Listing vector ids is only available for serverless indexes 235 | if ts.indexType == "serverless" { 236 | for _, idxConn := range ts.idxConns { 237 | listVectorIdsResponse, err := idxConn.ListVectors(context.Background(), &ListVectorsRequest{ 238 | Limit: &limit, 239 | }) 240 | require.NoError(ts.T(), err) 241 | 242 | assert.NotNil(ts.T(), listVectorIdsResponse, "ListVectors response should not be nil") 243 | assert.Equal(ts.T(), limit, uint32(len(listVectorIdsResponse.VectorIds)), "ListVectors response should have %d vector ids", limit) 244 | } 245 | } 246 | } 247 | -------------------------------------------------------------------------------- /pinecone/models.go: -------------------------------------------------------------------------------- 1 | package pinecone 2 | 3 | import ( 4 | "time" 5 | 6 | "google.golang.org/protobuf/types/known/structpb" 7 | ) 8 | 9 | // [IndexMetric] is the [similarity metric] to be used by similarity search against a Pinecone [Index]. 
10 | // 11 | // [similarity metric]: https://docs.pinecone.io/guides/indexes/understanding-indexes#similarity-metrics 12 | type IndexMetric string 13 | 14 | const ( 15 | Cosine IndexMetric = "cosine" // Default distance metric, ideal for textual data 16 | Dotproduct IndexMetric = "dotproduct" // Ideal for hybrid search 17 | Euclidean IndexMetric = "euclidean" // Ideal for distance-based data (e.g. lat/long points) 18 | ) 19 | 20 | // [IndexStatusState] is the state of a Pinecone [Index]. 21 | type IndexStatusState string 22 | 23 | const ( 24 | InitializationFailed IndexStatusState = "InitializationFailed" 25 | Initializing IndexStatusState = "Initializing" 26 | Ready IndexStatusState = "Ready" 27 | ScalingDown IndexStatusState = "ScalingDown" 28 | ScalingDownPodSize IndexStatusState = "ScalingDownPodSize" 29 | ScalingUp IndexStatusState = "ScalingUp" 30 | ScalingUpPodSize IndexStatusState = "ScalingUpPodSize" 31 | Terminating IndexStatusState = "Terminating" 32 | ) 33 | 34 | // [DeletionProtection] determines whether [deletion protection] is "enabled" or "disabled" for the [Index]. 35 | // When "enabled", the [Index] cannot be deleted. Defaults to "disabled". 36 | // 37 | // [deletion protection]: http://docs.pinecone.io/guides/indexes/prevent-index-deletion 38 | type DeletionProtection string 39 | 40 | const ( 41 | DeletionProtectionEnabled DeletionProtection = "enabled" 42 | DeletionProtectionDisabled DeletionProtection = "disabled" 43 | ) 44 | 45 | // [Cloud] is the [cloud provider] to be used for a Pinecone serverless [Index]. 46 | // 47 | // [cloud provider]: https://docs.pinecone.io/troubleshooting/available-cloud-regions 48 | type Cloud string 49 | 50 | const ( 51 | Aws Cloud = "aws" 52 | Azure Cloud = "azure" 53 | Gcp Cloud = "gcp" 54 | ) 55 | 56 | // [IndexStatus] is the status of a Pinecone [Index]. 57 | type IndexStatus struct { 58 | Ready bool `json:"ready"` 59 | State IndexStatusState `json:"state"` 60 | } 61 | 62 | // [IndexSpec] is the infrastructure specification (pods vs serverless) of a Pinecone [Index]. 63 | type IndexSpec struct { 64 | Pod *PodSpec `json:"pod,omitempty"` 65 | Serverless *ServerlessSpec `json:"serverless,omitempty"` 66 | } 67 | 68 | // [IndexEmbed] represents the embedding model configured for an index, 69 | // including document fields mapped to embedding inputs. 70 | // 71 | // Fields: 72 | // - Model: The name of the embedding model used to create the index (e.g., "multilingual-e5-large"). 73 | // - Dimension: The dimension of the embedding model, specifying the size of the output vector. 74 | // - Metric: The distance metric used by the embedding model. If the 'vector_type' is 'sparse', 75 | // the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric 76 | // defaults to 'cosine'. 77 | // - VectorType: The index vector type associated with the model. If 'dense', the vector dimension must be specified. 78 | // If 'sparse', the vector dimension will be nil. 79 | // - FieldMap: Identifies the name of the text field from your document model that is embedded. 80 | // - ReadParameters: The read parameters for the embedding model. 81 | // - WriteParameters: The write parameters for the embedding model. 
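//
// An illustrative read of these fields (assuming idx is an *Index that was
// returned by DescribeIndex and has integrated embedding configured):
//
//	if idx.Embed != nil {
//		fmt.Println(idx.Embed.Model)
//		if idx.Embed.Dimension != nil {
//			fmt.Printf("dimension: %d\n", *idx.Embed.Dimension)
//		}
//	}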
82 | type IndexEmbed struct { 83 | Model string `json:"model"` 84 | Dimension *int32 `json:"dimension,omitempty"` 85 | Metric *IndexMetric `json:"metric,omitempty"` 86 | VectorType *string `json:"vector_type,omitempty"` 87 | FieldMap *map[string]interface{} `json:"field_map,omitempty"` 88 | ReadParameters *map[string]interface{} `json:"read_parameters,omitempty"` 89 | WriteParameters *map[string]interface{} `json:"write_parameters,omitempty"` 90 | } 91 | 92 | // [IndexTags] is a set of key-value pairs that can be attached to a Pinecone [Index]. 93 | type IndexTags map[string]string 94 | 95 | // [Index] is a Pinecone [Index] object. Can be either a pod-based or a serverless [Index], depending on the [IndexSpec]. 96 | type Index struct { 97 | Name string `json:"name"` 98 | Host string `json:"host"` 99 | Metric IndexMetric `json:"metric"` 100 | VectorType string `json:"vector_type"` 101 | DeletionProtection DeletionProtection `json:"deletion_protection,omitempty"` 102 | Dimension *int32 `json:"dimension"` 103 | Spec *IndexSpec `json:"spec,omitempty"` 104 | Status *IndexStatus `json:"status,omitempty"` 105 | Tags *IndexTags `json:"tags,omitempty"` 106 | Embed *IndexEmbed `json:"embed,omitempty"` 107 | } 108 | 109 | // [Collection] is a Pinecone [collection entity]. Only available for pod-based Indexes. 110 | // 111 | // [collection entity]: https://docs.pinecone.io/guides/indexes/understanding-collections 112 | type Collection struct { 113 | Name string `json:"name"` 114 | Size int64 `json:"size"` 115 | Status CollectionStatus `json:"status"` 116 | Dimension int32 `json:"dimension"` 117 | VectorCount int32 `json:"vector_count"` 118 | Environment string `json:"environment"` 119 | } 120 | 121 | // [CollectionStatus] is the status of a Pinecone [Collection]. 122 | type CollectionStatus string 123 | 124 | const ( 125 | CollectionStatusInitializing CollectionStatus = "Initializing" 126 | CollectionStatusReady CollectionStatus = "Ready" 127 | CollectionStatusTerminating CollectionStatus = "Terminating" 128 | ) 129 | 130 | // [PodSpecMetadataConfig] represents the metadata fields to be indexed when a Pinecone [Index] is created. 131 | type PodSpecMetadataConfig struct { 132 | Indexed *[]string `json:"indexed,omitempty"` 133 | } 134 | 135 | // [PodSpec] is the infrastructure specification of a pod-based Pinecone [Index]. Only available for pod-based Indexes. 136 | type PodSpec struct { 137 | Environment string `json:"environment"` 138 | PodType string `json:"pod_type"` 139 | PodCount int `json:"pod_count"` 140 | Replicas int32 `json:"replicas"` 141 | ShardCount int32 `json:"shard_count"` 142 | SourceCollection *string `json:"source_collection,omitempty"` 143 | MetadataConfig *PodSpecMetadataConfig `json:"metadata_config,omitempty"` 144 | } 145 | 146 | // [ServerlessSpec] is the infrastructure specification of a serverless Pinecone [Index]. Only available for serverless Indexes. 147 | type ServerlessSpec struct { 148 | Cloud Cloud `json:"cloud"` 149 | Region string `json:"region"` 150 | } 151 | 152 | // [Vector] is a [dense or sparse vector object] with optional metadata. 
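//
// An illustrative dense vector literal (placeholder id and values):
//
//	vals := []float32{0.1, 0.2, 0.3}
//	v := Vector{Id: "vec-1", Values: &vals}
//
// For sparse data, leave Values nil and set SparseValues instead.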
153 | // 154 | // [dense or sparse vector object]: https://docs.pinecone.io/guides/get-started/key-concepts#dense-vector 155 | type Vector struct { 156 | Id string `json:"id"` 157 | Values *[]float32 `json:"values,omitempty"` 158 | SparseValues *SparseValues `json:"sparse_values,omitempty"` 159 | Metadata *Metadata `json:"metadata,omitempty"` 160 | } 161 | 162 | // [ScoredVector] is a vector with an associated similarity score calculated according to the distance metric of the 163 | // [Index]. 164 | type ScoredVector struct { 165 | Vector *Vector `json:"vector,omitempty"` 166 | Score float32 `json:"score"` 167 | } 168 | 169 | // [SparseValues] is a sparse vector object, most commonly used for [hybrid search]. 170 | // 171 | // [hybrid search]: https://docs.pinecone.io/guides/data/understanding-hybrid-search#hybrid-search-in-pinecone 172 | type SparseValues struct { 173 | Indices []uint32 `json:"indices,omitempty"` 174 | Values []float32 `json:"values,omitempty"` 175 | } 176 | 177 | // [NamespaceSummary] is a summary of stats for a Pinecone [namespace]. 178 | // 179 | // [namespace]: https://docs.pinecone.io/guides/indexes/use-namespaces 180 | type NamespaceSummary struct { 181 | VectorCount uint32 `json:"vector_count"` 182 | } 183 | 184 | // [Usage] is the usage stats ([Read Units]) for a Pinecone [Index]. 185 | // 186 | // [Read Units]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#serverless-indexes 187 | type Usage struct { 188 | ReadUnits uint32 `json:"read_units"` 189 | } 190 | 191 | // [RerankUsage] is the usage stats ([Rerank Units]) for a reranking request. 192 | // 193 | // [Rerank Units]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#rerank 194 | type RerankUsage struct { 195 | RerankUnits *int `json:"rerank_units,omitempty"` 196 | } 197 | 198 | // [MetadataFilter] represents the [metadata filters] attached to a Pinecone request. 199 | // These optional metadata filters are applied to query and deletion requests. 200 | // 201 | // [metadata filters]: https://docs.pinecone.io/guides/data/filter-with-metadata#querying-an-index-with-metadata-filters 202 | type MetadataFilter = structpb.Struct 203 | 204 | // [Metadata] represents optional, 205 | // additional information that can be [attached to, or updated for, a vector] in a Pinecone Index. 206 | // 207 | // [attached to, or updated for, a vector]: https://docs.pinecone.io/guides/data/filter-with-metadata#inserting-metadata-into-an-index 208 | type Metadata = structpb.Struct 209 | 210 | // [Embedding] represents the embedding of a single input which is returned after [generating embeddings]. 211 | // 212 | // [generating embeddings]: https://docs.pinecone.io/guides/inference/generate-embeddings#3-generate-embeddings 213 | type Embedding struct { 214 | Values *[]float32 `json:"values,omitempty"` 215 | } 216 | 217 | // [ImportStatus] represents the status of an [Import] operation. 218 | // 219 | // Values: 220 | //   - Cancelled: The [Import] was canceled. 221 | //   - Completed: The [Import] completed successfully. 222 | //   - Failed: The [Import] encountered an error and did not complete successfully. 223 | //   - InProgress: The [Import] is currently in progress. 224 | //   - Pending: The [Import] is pending and has not yet started. 
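//
// An illustrative status check (assuming imp is an *Import returned by
// DescribeImport):
//
//	switch imp.Status {
//	case Completed:
//		// the import finished successfully
//	case Failed:
//		// inspect imp.Error for the failure reason
//	default:
//		// Pending or InProgress; poll again later
//	}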
225 | type ImportStatus string 226 | 227 | const ( 228 | Cancelled ImportStatus = "Cancelled" 229 | Completed ImportStatus = "Completed" 230 | Failed ImportStatus = "Failed" 231 | InProgress ImportStatus = "InProgress" 232 | Pending ImportStatus = "Pending" 233 | ) 234 | 235 | // ImportErrorMode specifies how errors are handled during an [Import]. 236 | // 237 | // Values: 238 | // - Abort: The [Import] process will abort upon encountering an error. 239 | // - Continue: The [Import] process will continue, skipping over records that produce errors. 240 | type ImportErrorMode string 241 | 242 | const ( 243 | Abort ImportErrorMode = "abort" 244 | Continue ImportErrorMode = "continue" 245 | ) 246 | 247 | // [Import] represents the details and status of an import process. 248 | // 249 | // Fields: 250 | // - Id: The unique identifier of the [Import] process. 251 | // - PercentComplete: The percentage of the [Import] process that has been completed. 252 | // - RecordsImported: The total number of records successfully imported. 253 | // - Status: The current status of the [Import] (e.g., "InProgress", "Completed", "Failed"). 254 | // - Uri: The URI of the source data for the [Import]. 255 | // - CreatedAt: The time at which the [Import] process was initiated. 256 | // - FinishedAt: The time at which the [Import] process finished (either successfully or with an error). 257 | // - Error: If the [Import] failed, contains the error message associated with the failure. 258 | type Import struct { 259 | Id string `json:"id,omitempty"` 260 | PercentComplete float32 `json:"percent_complete,omitempty"` 261 | RecordsImported int64 `json:"records_imported,omitempty"` 262 | Status ImportStatus `json:"status,omitempty"` 263 | Uri string `json:"uri,omitempty"` 264 | CreatedAt *time.Time `json:"created_at,omitempty"` 265 | FinishedAt *time.Time `json:"finished_at,omitempty"` 266 | Error *string `json:"error,omitempty"` 267 | } 268 | 269 | type IntegratedRecord map[string]interface{} 270 | 271 | // SearchRecordsRequest represents a search request for records in a specific namespace. 272 | // 273 | // Fields: 274 | // - Query: The query inputs to search with. 275 | // - Fields: The fields to return in the search results. 276 | // - Rerank: Parameters for reranking the initial search results. 277 | type SearchRecordsRequest struct { 278 | Query SearchRecordsQuery `json:"query"` 279 | Fields *[]string `json:"fields,omitempty"` 280 | Rerank *SearchRecordsRerank `json:"rerank,omitempty"` 281 | } 282 | 283 | // SearchRecordsQuery represents the query parameters for searching records. 284 | // 285 | // Fields: 286 | // - TopK: The number of results to return for each search. 287 | // - Filter: The filter to apply. 288 | // - Id: The unique ID of the vector to be used as a query vector. 289 | // - Inputs: Additional input parameters for the query. 290 | // - Vector: The vector representation of the query. 291 | type SearchRecordsQuery struct { 292 | TopK int32 `json:"top_k"` 293 | Filter *map[string]interface{} `json:"filter,omitempty"` 294 | Id *string `json:"id,omitempty"` 295 | Inputs *map[string]interface{} `json:"inputs,omitempty"` 296 | Vector *SearchRecordsVector `json:"vector,omitempty"` 297 | } 298 | 299 | // SearchRecordsRerank represents the parameters for reranking search results. 300 | // 301 | // Fields: 302 | // - Model: The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use. 303 | // - RankFields: The field(s) to consider for reranking. 
Defaults to `["text"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). 304 | // - Parameters: Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. 305 | // - Query: The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. 306 | // - TopN: The number of top results to return after reranking. Defaults to top_k. 307 | type SearchRecordsRerank struct { 308 | Model string `json:"model"` 309 | RankFields []string `json:"rank_fields"` 310 | Parameters *map[string]interface{} `json:"parameters,omitempty"` 311 | Query *string `json:"query,omitempty"` 312 | TopN *int32 `json:"top_n,omitempty"` 313 | } 314 | 315 | // Hit represents a record whose vector values are similar to the provided search query. 316 | // 317 | // Fields: 318 | // - Id: The record ID of the search hit. 319 | // - Score: The similarity score of the returned record. 320 | // - Fields: The selected record fields associated with the search hit. 321 | type Hit struct { 322 | Id string `json:"_id"` 323 | Score float32 `json:"_score"` 324 | Fields map[string]interface{} `json:"fields"` 325 | } 326 | 327 | // SearchRecordsResponse represents the response of a records search. 328 | // 329 | // Fields: 330 | // - Result: The result object containing the [Hit] responses for the search. 331 | // - Usage: The resource usage details for the search operation. 332 | type SearchRecordsResponse struct { 333 | Result struct { 334 | Hits []Hit `json:"hits"` 335 | } `json:"result"` 336 | Usage SearchUsage `json:"usage"` 337 | } 338 | 339 | // SearchRecordsVector represents the vector data used in a search request. 340 | // 341 | // Fields: 342 | // - SparseIndices: The sparse embedding indices. 343 | // - SparseValues: The sparse embedding values. 344 | // - Values: The dense vector data included in the request. 345 | type SearchRecordsVector struct { 346 | SparseIndices *[]int32 `json:"sparse_indices,omitempty"` 347 | SparseValues *[]float32 `json:"sparse_values,omitempty"` 348 | Values *[]float32 `json:"values,omitempty"` 349 | } 350 | 351 | // SearchUsage represents the resource usage details of a search operation. 352 | // 353 | // Fields: 354 | // - ReadUnits: The number of read units consumed by this operation. 355 | // - EmbedTotalTokens: The number of embedding tokens consumed by this operation. 356 | // - RerankUnits: The number of rerank units consumed by this operation. 
357 | type SearchUsage struct { 358 | ReadUnits int32 `json:"read_units"` 359 | EmbedTotalTokens *int32 `json:"embed_total_tokens,omitempty"` 360 | RerankUnits *int32 `json:"rerank_units,omitempty"` 361 | } 362 | -------------------------------------------------------------------------------- /pinecone/models_test.go: -------------------------------------------------------------------------------- 1 | package pinecone 2 | 3 | import ( 4 | "encoding/json" 5 | "testing" 6 | 7 | "google.golang.org/protobuf/types/known/structpb" 8 | ) 9 | 10 | func TestMarshalIndexStatusUnit(t *testing.T) { 11 | tests := []struct { 12 | name string 13 | input IndexStatus 14 | want string 15 | }{ 16 | { 17 | name: "All fields present", 18 | input: IndexStatus{Ready: true, State: "Ready"}, 19 | want: `{"ready":true,"state":"Ready"}`, 20 | }, 21 | { 22 | name: "Fields omitted", 23 | input: IndexStatus{}, 24 | want: `{"ready":false,"state":""}`, 25 | }, 26 | { 27 | name: "Fields empty", 28 | input: IndexStatus{Ready: false, State: ""}, 29 | want: `{"ready":false,"state":""}`, 30 | }, 31 | } 32 | 33 | for _, tt := range tests { 34 | t.Run(tt.name, func(c *testing.T) { 35 | got, err := json.Marshal(tt.input) 36 | if err != nil { 37 | c.Errorf("Failed to marshal IndexStatus: %v", err) 38 | return 39 | } 40 | if string(got) != tt.want { 41 | c.Errorf("Marshal IndexStatus got = %s, want = %s", string(got), tt.want) 42 | } 43 | }) 44 | } 45 | } 46 | 47 | func TestMarshalServerlessSpecUnit(t *testing.T) { 48 | tests := []struct { 49 | name string 50 | input ServerlessSpec 51 | want string 52 | }{ 53 | { 54 | name: "All fields present", 55 | input: ServerlessSpec{Cloud: "aws", Region: "us-west-"}, 56 | want: `{"cloud":"aws","region":"us-west-"}`, 57 | }, 58 | { 59 | name: "Fields omitted", 60 | input: ServerlessSpec{}, 61 | want: `{"cloud":"","region":""}`, 62 | }, 63 | { 64 | name: "Fields empty", 65 | input: ServerlessSpec{Cloud: "", Region: ""}, 66 | want: `{"cloud":"","region":""}`, 67 | }, 68 | } 69 | 70 | for _, tt := range tests { 71 | t.Run(tt.name, func(c *testing.T) { 72 | got, err := json.Marshal(tt.input) 73 | if err != nil { 74 | c.Errorf("Failed to marshal ServerlessSpec: %v", err) 75 | return 76 | } 77 | if string(got) != tt.want { 78 | c.Errorf("Marshal ServerlessSpec got = %s, want = %s", string(got), tt.want) 79 | } 80 | 81 | }) 82 | } 83 | } 84 | 85 | func TestMarshalPodSpecUnit(t *testing.T) { 86 | sourceCollection := "source-collection" 87 | tests := []struct { 88 | name string 89 | input PodSpec 90 | want string 91 | }{ 92 | { 93 | name: "All fields present", 94 | input: PodSpec{ 95 | Environment: "us-west2-gcp", 96 | PodType: "p1.x1", 97 | PodCount: 1, 98 | Replicas: 1, 99 | ShardCount: 1, 100 | SourceCollection: &sourceCollection, 101 | MetadataConfig: &PodSpecMetadataConfig{ 102 | Indexed: &[]string{"genre"}, 103 | }, 104 | }, 105 | want: `{"environment":"us-west2-gcp","pod_type":"p1.x1","pod_count":1,"replicas":1,"shard_count":1,"source_collection":"source-collection","metadata_config":{"indexed":["genre"]}}`, 106 | }, 107 | { 108 | name: "Fields omitted", 109 | input: PodSpec{}, 110 | want: `{"environment":"","pod_type":"","pod_count":0,"replicas":0,"shard_count":0}`, 111 | }, 112 | { 113 | name: "Fields empty", 114 | input: PodSpec{ 115 | Environment: "", 116 | PodType: "", 117 | PodCount: 0, 118 | Replicas: 0, 119 | ShardCount: 0, 120 | SourceCollection: nil, 121 | MetadataConfig: nil, 122 | }, 123 | want: `{"environment":"","pod_type":"","pod_count":0,"replicas":0,"shard_count":0}`, 124 | }, 
125 | } 126 | 127 | for _, tt := range tests { 128 | t.Run(tt.name, func(c *testing.T) { 129 | got, err := json.Marshal(tt.input) 130 | if err != nil { 131 | c.Errorf("Failed to marshal PodSpec: %v", err) 132 | return 133 | } 134 | if string(got) != tt.want { 135 | c.Errorf("Marshal PodSpec got = %s, want = %s", string(got), tt.want) 136 | } 137 | }) 138 | } 139 | } 140 | 141 | func TestMarshalIndexSpecUnit(t *testing.T) { 142 | sourceCollection := "source-collection" 143 | tests := []struct { 144 | name string 145 | input IndexSpec 146 | want string 147 | }{ 148 | { 149 | name: "Pod spec", 150 | input: IndexSpec{Pod: &PodSpec{ 151 | Environment: "us-west2-gcp", 152 | PodType: "p1.x1", 153 | PodCount: 1, 154 | Replicas: 1, 155 | ShardCount: 1, 156 | SourceCollection: &sourceCollection, 157 | MetadataConfig: &PodSpecMetadataConfig{ 158 | Indexed: &[]string{"genre"}, 159 | }, 160 | }}, 161 | want: `{"pod":{"environment":"us-west2-gcp","pod_type":"p1.x1","pod_count":1,"replicas":1,"shard_count":1,"source_collection":"source-collection","metadata_config":{"indexed":["genre"]}}}`, 162 | }, 163 | { 164 | name: "Serverless spec", 165 | input: IndexSpec{Serverless: &ServerlessSpec{Cloud: "aws", Region: "us-west-"}}, 166 | want: `{"serverless":{"cloud":"aws","region":"us-west-"}}`, 167 | }, 168 | { 169 | name: "Fields omitted", 170 | input: IndexSpec{}, 171 | want: `{}`, 172 | }, 173 | { 174 | name: "Fields empty", 175 | input: IndexSpec{Pod: nil, Serverless: nil}, 176 | want: `{}`, 177 | }, 178 | } 179 | 180 | for _, tt := range tests { 181 | t.Run(tt.name, func(c *testing.T) { 182 | got, err := json.Marshal(tt.input) 183 | if err != nil { 184 | c.Errorf("Failed to marshal IndexSpec: %v", err) 185 | return 186 | } 187 | if string(got) != tt.want { 188 | c.Errorf("Marshal IndexSpec got = %s, want = %s", string(got), tt.want) 189 | } 190 | }) 191 | } 192 | } 193 | 194 | func TestMarshalIndexUnit(t *testing.T) { 195 | dimension := int32(128) 196 | 197 | tests := []struct { 198 | name string 199 | input Index 200 | want string 201 | }{ 202 | { 203 | name: "All fields present", 204 | input: Index{ 205 | Name: "test-index", 206 | Dimension: &dimension, 207 | Host: "index-host-1.io", 208 | Metric: "cosine", 209 | VectorType: "sparse", 210 | DeletionProtection: "enabled", 211 | Embed: &IndexEmbed{ 212 | Model: "multilingual-e5-large", 213 | }, 214 | Spec: &IndexSpec{ 215 | Serverless: &ServerlessSpec{ 216 | Cloud: "aws", 217 | Region: "us-west-2", 218 | }, 219 | }, 220 | Status: &IndexStatus{ 221 | Ready: true, 222 | State: "Ready", 223 | }, 224 | Tags: &IndexTags{ 225 | "test1": "test-tag-1", 226 | }, 227 | }, 228 | want: `{"name":"test-index","host":"index-host-1.io","metric":"cosine","vector_type":"sparse","deletion_protection":"enabled","dimension":128,"spec":{"serverless":{"cloud":"aws","region":"us-west-2"}},"status":{"ready":true,"state":"Ready"},"tags":{"test1":"test-tag-1"},"embed":{"model":"multilingual-e5-large"}}`, 229 | }, 230 | { 231 | name: "Fields omitted", 232 | input: Index{}, 233 | want: `{"name":"","host":"","metric":"","vector_type":"","dimension":null}`, 234 | }, 235 | { 236 | name: "Fields empty", 237 | input: Index{ 238 | Name: "", 239 | Dimension: nil, 240 | Host: "", 241 | Metric: "", 242 | Spec: nil, 243 | Status: nil, 244 | }, 245 | want: `{"name":"","host":"","metric":"","vector_type":"","dimension":null}`, 246 | }, 247 | } 248 | 249 | for _, tt := range tests { 250 | t.Run(tt.name, func(c *testing.T) { 251 | got, err := json.Marshal(tt.input) 252 | if err != nil { 253 | 
c.Errorf("Failed to marshal Index: %v", err) 254 | return 255 | } 256 | if string(got) != tt.want { 257 | c.Errorf("Marshal Index got = %s, want = %s", string(got), tt.want) 258 | } 259 | }) 260 | } 261 | } 262 | 263 | func TestMarshalCollectionUnit(t *testing.T) { 264 | tests := []struct { 265 | name string 266 | input Collection 267 | want string 268 | }{ 269 | { 270 | name: "All fields present", 271 | input: Collection{ 272 | Name: "test-collection", 273 | Size: 15328, 274 | Status: "Ready", 275 | Dimension: 132, 276 | VectorCount: 15000, 277 | Environment: "us-west-2", 278 | }, 279 | want: `{"name":"test-collection","size":15328,"status":"Ready","dimension":132,"vector_count":15000,"environment":"us-west-2"}`, 280 | }, 281 | { 282 | name: "Fields omitted", 283 | input: Collection{}, 284 | want: `{"name":"","size":0,"status":"","dimension":0,"vector_count":0,"environment":""}`, 285 | }, 286 | { 287 | name: "Fields empty", 288 | input: Collection{ 289 | Name: "", 290 | Size: 0, 291 | Status: "", 292 | Dimension: 0, 293 | VectorCount: 0, 294 | Environment: "", 295 | }, 296 | want: `{"name":"","size":0,"status":"","dimension":0,"vector_count":0,"environment":""}`, 297 | }, 298 | } 299 | 300 | for _, tt := range tests { 301 | t.Run(tt.name, func(c *testing.T) { 302 | got, err := json.Marshal(tt.input) 303 | if err != nil { 304 | c.Errorf("Failed to marshal Collection: %v", err) 305 | return 306 | } 307 | if string(got) != tt.want { 308 | c.Errorf("Marshal Collection got = %s, want = %s", string(got), tt.want) 309 | } 310 | }) 311 | } 312 | } 313 | 314 | func TestMarshalPodSpecMetadataConfigUnit(t *testing.T) { 315 | tests := []struct { 316 | name string 317 | input PodSpecMetadataConfig 318 | want string 319 | }{ 320 | { 321 | name: "All fields present", 322 | input: PodSpecMetadataConfig{Indexed: &[]string{"genre", "artist"}}, 323 | want: `{"indexed":["genre","artist"]}`, 324 | }, 325 | { 326 | name: "Fields omitted", 327 | input: PodSpecMetadataConfig{}, 328 | want: `{}`, 329 | }, 330 | { 331 | name: "Fields empty", 332 | input: PodSpecMetadataConfig{Indexed: nil}, 333 | want: `{}`, 334 | }, 335 | } 336 | 337 | for _, tt := range tests { 338 | t.Run(tt.name, func(c *testing.T) { 339 | got, err := json.Marshal(tt.input) 340 | if err != nil { 341 | c.Errorf("Failed to marshal PodSpecMetadataConfig: %v", err) 342 | return 343 | } 344 | if string(got) != tt.want { 345 | c.Errorf("Marshal PodSpecMetadataConfig got = %s, want = %s", string(got), tt.want) 346 | } 347 | }) 348 | } 349 | } 350 | 351 | func TestMarshalVectorUnit(t *testing.T) { 352 | metadata, err := structpb.NewStruct(map[string]interface{}{"genre": "rock"}) 353 | if err != nil { 354 | t.Fatalf("Failed to create metadata: %v", err) 355 | } 356 | vecValues := []float32{0.1, 0.2, 0.3} 357 | 358 | tests := []struct { 359 | name string 360 | input Vector 361 | want string 362 | }{ 363 | { 364 | name: "All fields present", 365 | input: Vector{ 366 | Id: "vector-1", 367 | Values: &vecValues, 368 | Metadata: metadata, 369 | SparseValues: &SparseValues{ 370 | Indices: []uint32{1, 2, 3}, 371 | Values: []float32{0.1, 0.2, 0.3}, 372 | }, 373 | }, 374 | want: `{"id":"vector-1","values":[0.1,0.2,0.3],"sparse_values":{"indices":[1,2,3],"values":[0.1,0.2,0.3]},"metadata":{"genre":"rock"}}`, 375 | }, 376 | { 377 | name: "Fields omitted", 378 | input: Vector{}, 379 | want: `{"id":""}`, 380 | }, 381 | { 382 | name: "Fields empty", 383 | input: Vector{Id: "", Values: nil, SparseValues: nil, Metadata: nil}, 384 | want: `{"id":""}`, 385 | }, 386 | } 
387 | 388 | for _, tt := range tests { 389 | t.Run(tt.name, func(c *testing.T) { 390 | got, err := json.Marshal(tt.input) 391 | if err != nil { 392 | c.Errorf("Failed to marshal Vector: %v", err) 393 | return 394 | } 395 | if string(got) != tt.want { 396 | c.Errorf("Marshal Vector got = %s, want = %s", string(got), tt.want) 397 | } 398 | }) 399 | } 400 | } 401 | 402 | func TestMarshalScoredVectorUnit(t *testing.T) { 403 | metadata, err := structpb.NewStruct(map[string]interface{}{"genre": "rock"}) 404 | if err != nil { 405 | t.Fatalf("Failed to create metadata: %v", err) 406 | } 407 | vecValues := []float32{0.1, 0.2, 0.3} 408 | 409 | tests := []struct { 410 | name string 411 | input ScoredVector 412 | want string 413 | }{ 414 | { 415 | name: "All fields present", 416 | input: ScoredVector{ 417 | Vector: &Vector{ 418 | Id: "vector-1", 419 | Values: &vecValues, 420 | Metadata: metadata, 421 | SparseValues: &SparseValues{ 422 | Indices: []uint32{1, 2, 3}, 423 | Values: []float32{0.1, 0.2, 0.3}, 424 | }, 425 | }, 426 | Score: 0.9, 427 | }, 428 | want: `{"vector":{"id":"vector-1","values":[0.1,0.2,0.3],"sparse_values":{"indices":[1,2,3],"values":[0.1,0.2,0.3]},"metadata":{"genre":"rock"}},"score":0.9}`, 429 | }, 430 | { 431 | name: "Fields omitted", 432 | input: ScoredVector{}, 433 | want: `{"score":0}`, 434 | }, 435 | { 436 | name: "Fields empty", 437 | input: ScoredVector{Vector: nil, Score: 0}, 438 | want: `{"score":0}`, 439 | }, 440 | } 441 | 442 | for _, tt := range tests { 443 | t.Run(tt.name, func(c *testing.T) { 444 | got, err := json.Marshal(tt.input) 445 | if err != nil { 446 | c.Errorf("Failed to marshal ScoredVector: %v", err) 447 | return 448 | } 449 | if string(got) != tt.want { 450 | c.Errorf("Marshal ScoredVector got = %s, want = %s", string(got), tt.want) 451 | } 452 | }) 453 | } 454 | } 455 | 456 | func TestMarshalSparseValuesUnit(t *testing.T) { 457 | tests := []struct { 458 | name string 459 | input SparseValues 460 | want string 461 | }{ 462 | { 463 | name: "All fields present", 464 | input: SparseValues{ 465 | Indices: []uint32{1, 2, 3}, 466 | Values: []float32{0.1, 0.2, 0.3}, 467 | }, 468 | want: `{"indices":[1,2,3],"values":[0.1,0.2,0.3]}`, 469 | }, 470 | { 471 | name: "Fields omitted", 472 | input: SparseValues{}, 473 | want: `{}`, 474 | }, 475 | { 476 | name: "Fields empty", 477 | input: SparseValues{Indices: nil, Values: nil}, 478 | want: `{}`, 479 | }, 480 | } 481 | 482 | for _, tt := range tests { 483 | t.Run(tt.name, func(c *testing.T) { 484 | got, err := json.Marshal(tt.input) 485 | if err != nil { 486 | c.Errorf("Failed to marshal SparseValues: %v", err) 487 | return 488 | } 489 | if string(got) != tt.want { 490 | c.Errorf("Marshal SparseValues got = %s, want = %s", string(got), tt.want) 491 | } 492 | }) 493 | } 494 | } 495 | 496 | func TestMarshalNamespaceSummaryUnit(t *testing.T) { 497 | tests := []struct { 498 | name string 499 | input NamespaceSummary 500 | want string 501 | }{ 502 | { 503 | name: "All fields present", 504 | input: NamespaceSummary{VectorCount: 15000}, 505 | want: `{"vector_count":15000}`, 506 | }, 507 | { 508 | name: "Fields omitted", 509 | input: NamespaceSummary{}, 510 | want: `{"vector_count":0}`, 511 | }, 512 | { 513 | name: "Fields empty", 514 | input: NamespaceSummary{VectorCount: 0}, 515 | want: `{"vector_count":0}`, 516 | }, 517 | } 518 | 519 | for _, tt := range tests { 520 | t.Run(tt.name, func(c *testing.T) { 521 | got, err := json.Marshal(tt.input) 522 | if err != nil { 523 | c.Errorf("Failed to marshal NamespaceSummary: %v", 
err)
524 | 			return
525 | 		}
526 | 		if string(got) != tt.want {
527 | 			c.Errorf("Marshal NamespaceSummary got = %s, want = %s", string(got), tt.want)
528 | 		}
529 | 	})
530 | 	}
531 | }
532 | 
533 | func TestMarshalUsageUnit(t *testing.T) {
534 | 	tests := []struct {
535 | 		name  string
536 | 		input Usage
537 | 		want  string
538 | 	}{
539 | 		{
540 | 			name:  "All fields present",
541 | 			input: Usage{ReadUnits: 100},
542 | 			want:  `{"read_units":100}`,
543 | 		},
544 | 		{
545 | 			name:  "Fields omitted",
546 | 			input: Usage{},
547 | 			want:  `{"read_units":0}`,
548 | 		},
549 | 		{
550 | 			name:  "Fields empty",
551 | 			input: Usage{ReadUnits: 0},
552 | 			want:  `{"read_units":0}`,
553 | 		},
554 | 	}
555 | 
556 | 	for _, tt := range tests {
557 | 		t.Run(tt.name, func(c *testing.T) {
558 | 			got, err := json.Marshal(tt.input)
559 | 			if err != nil {
560 | 				c.Errorf("Failed to marshal Usage: %v", err)
561 | 				return
562 | 			}
563 | 			if string(got) != tt.want {
564 | 				c.Errorf("Marshal Usage got = %s, want = %s", string(got), tt.want)
565 | 			}
566 | 		})
567 | 	}
568 | }
569 | 
570 | func TestMarshalIndexEmbedUnit(t *testing.T) {
571 | 	dimension := int32(128)
572 | 	metric := IndexMetric("cosine")
573 | 	vectorType := "sparse"
574 | 	fieldMap := map[string]interface{}{
575 | 		"text-field": "my-text-field",
576 | 	}
577 | 	readParameters := map[string]interface{}{
578 | 		"readParam": "readParamValue",
579 | 	}
580 | 	writeParameters := map[string]interface{}{
581 | 		"writeParam": "writeParamValue",
582 | 	}
583 | 
584 | 	tests := []struct {
585 | 		name  string
586 | 		input IndexEmbed
587 | 		want  string
588 | 	}{
589 | 		{
590 | 			name: "All fields present",
591 | 			input: IndexEmbed{
592 | 				Model:           "multilingual-e5-large",
593 | 				Dimension:       &dimension,
594 | 				Metric:          &metric,
595 | 				VectorType:      &vectorType,
596 | 				FieldMap:        &fieldMap,
597 | 				ReadParameters:  &readParameters,
598 | 				WriteParameters: &writeParameters,
599 | 			},
600 | 			want: `{"model":"multilingual-e5-large","dimension":128,"metric":"cosine","vector_type":"sparse","field_map":{"text-field":"my-text-field"},"read_parameters":{"readParam":"readParamValue"},"write_parameters":{"writeParam":"writeParamValue"}}`,
601 | 		},
602 | 		{
603 | 			name:  "Fields omitted",
604 | 			input: IndexEmbed{},
605 | 			want:  `{"model":""}`,
606 | 		},
607 | 		{
608 | 			name: "Fields empty",
609 | 			input: IndexEmbed{
610 | 				Model:           "",
611 | 				Dimension:       nil,
612 | 				Metric:          nil,
613 | 				VectorType:      nil,
614 | 				FieldMap:        nil,
615 | 				ReadParameters:  nil,
616 | 				WriteParameters: nil,
617 | 			},
618 | 			want: `{"model":""}`,
619 | 		},
620 | 	}
621 | 
622 | 	for _, tt := range tests {
623 | 		t.Run(tt.name, func(c *testing.T) {
624 | 			got, err := json.Marshal(tt.input)
625 | 			if err != nil {
626 | 				c.Errorf("Failed to marshal IndexEmbed: %v", err)
627 | 				return
628 | 			}
629 | 			if string(got) != tt.want {
630 | 				c.Errorf("Marshal IndexEmbed got = %s, want = %s", string(got), tt.want)
631 | 			}
632 | 		})
633 | 	}
634 | }
635 | -------------------------------------------------------------------------------- /pinecone/suite_runner_test.go: --------------------------------------------------------------------------------
1 | // This file is used to run all the test suites in the package pinecone
2 | package pinecone
3 | 
4 | import (
5 | 	"os"
6 | 	"testing"
7 | 
8 | 	"github.com/stretchr/testify/assert"
9 | 	"github.com/stretchr/testify/require"
10 | 	"github.com/stretchr/testify/suite"
11 | )
12 | 
13 | // This is the entry point for all integration tests
14 | // This test function is picked up by go test and triggers the suite runs
15 | func TestRunSuites(t *testing.T) {
16 | 	RunSuites(t)
17 | }
18 | 
19 | func RunSuites(t
*testing.T) { 20 | apiKey, present := os.LookupEnv("PINECONE_API_KEY") 21 | assert.True(t, present, "PINECONE_API_KEY env variable not set") 22 | 23 | sourceTag := "pinecone_test_go_sdk" 24 | client, err := NewClient(NewClientParams{ApiKey: apiKey, SourceTag: sourceTag}) 25 | require.NotNil(t, client, "Client should not be nil after creation") 26 | require.NoError(t, err) 27 | indexTags := IndexTags{"test1": "test-tag-1", "test2": "test-tag-2"} 28 | 29 | serverlessIdx := buildServerlessTestIndex(client, "serverless-"+generateTestIndexName(), indexTags) 30 | podIdx := buildPodTestIndex(client, "pods-"+generateTestIndexName(), indexTags) 31 | 32 | podTestSuite := &IntegrationTests{ 33 | apiKey: apiKey, 34 | indexType: "pods", 35 | host: podIdx.Host, 36 | dimension: podIdx.Dimension, 37 | client: client, 38 | sourceTag: sourceTag, 39 | idxName: podIdx.Name, 40 | indexTags: &indexTags, 41 | } 42 | 43 | serverlessTestSuite := &IntegrationTests{ 44 | apiKey: apiKey, 45 | indexType: "serverless", 46 | host: serverlessIdx.Host, 47 | dimension: serverlessIdx.Dimension, 48 | client: client, 49 | sourceTag: sourceTag, 50 | idxName: serverlessIdx.Name, 51 | indexTags: &indexTags, 52 | } 53 | 54 | suite.Run(t, podTestSuite) 55 | suite.Run(t, serverlessTestSuite) 56 | } 57 | -------------------------------------------------------------------------------- /pinecone/test_suite.go: -------------------------------------------------------------------------------- 1 | package pinecone 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "log" 7 | "math/rand" 8 | "testing" 9 | "time" 10 | 11 | "github.com/google/uuid" 12 | "github.com/stretchr/testify/require" 13 | "github.com/stretchr/testify/suite" 14 | ) 15 | 16 | type IntegrationTests struct { 17 | suite.Suite 18 | apiKey string 19 | client *Client 20 | host string 21 | dimension *int32 22 | indexType string 23 | vectorIds []string 24 | idxName string 25 | idxConn *IndexConnection 26 | collectionName string 27 | sourceTag string 28 | indexTags *IndexTags 29 | } 30 | 31 | func (ts *IntegrationTests) SetupSuite() { 32 | ctx := context.Background() 33 | 34 | _, err := waitUntilIndexReady(ts, ctx) 35 | require.NoError(ts.T(), err) 36 | 37 | namespace := uuid.New().String() 38 | 39 | idxConn, err := ts.client.Index(NewIndexConnParams{ 40 | Host: ts.host, 41 | Namespace: namespace, 42 | }) 43 | 44 | require.NoError(ts.T(), err) 45 | require.NotNil(ts.T(), idxConn, "Failed to create idxConn") 46 | 47 | ts.idxConn = idxConn 48 | dim := int32(0) 49 | if ts.dimension != nil { 50 | dim = *ts.dimension 51 | } 52 | 53 | // Deterministically create vectors 54 | vectors := generateVectors(10, dim, false, nil) 55 | 56 | // Add vector ids to the suite 57 | vectorIds := make([]string, len(vectors)) 58 | for i, v := range vectors { 59 | vectorIds[i] = v.Id 60 | } 61 | 62 | // Upsert vectors 63 | err = upsertVectors(ts, ctx, vectors) 64 | if err != nil { 65 | log.Fatalf("Failed to upsert vectors in SetupSuite: %v", err) 66 | } 67 | 68 | // Wait for vector freshness 69 | err = pollIndexForFreshness(ts, ctx, vectorIds[0]) 70 | if err != nil { 71 | log.Fatalf("Vector freshness failed in SetupSuite: %v", err) 72 | } 73 | 74 | // Create collection for pod index suite 75 | if ts.indexType == "pods" { 76 | createCollection(ts, ctx) 77 | } 78 | 79 | fmt.Printf("\n %s set up suite completed successfully\n", ts.indexType) 80 | 81 | // Poll for data freshness 82 | } 83 | 84 | func (ts *IntegrationTests) TearDownSuite() { 85 | ctx := context.Background() 86 | 87 | // Close index connection 88 | 
err := ts.idxConn.Close()
89 | 	require.NoError(ts.T(), err)
90 | 
91 | 	// Delete collection
92 | 	if ts.collectionName != "" {
93 | 		err = ts.client.DeleteCollection(ctx, ts.collectionName)
94 | 		require.NoError(ts.T(), err)
95 | 
96 | 		// Before moving on to deleting the index, wait for collection to be cleaned up
97 | 		time.Sleep(3 * time.Second)
98 | 	}
99 | 
100 | 	// Delete test index
101 | 	err = ts.client.DeleteIndex(ctx, ts.idxName)
102 | 
103 | 	// If the index failed to delete, wait a bit and retry cleaning up
104 | 	// Sometimes indexes are stuck upgrading, or have pending collections
105 | 	retry := 4
106 | 	for err != nil && retry > 0 {
107 | 		time.Sleep(5 * time.Second)
108 | 		fmt.Printf("Failed to delete index \"%s\". Retrying... (%d/4)\n", ts.idxName, 5-retry)
109 | 		err = ts.client.DeleteIndex(ctx, ts.idxName)
110 | 		retry--
111 | 	}
112 | 
113 | 	if err != nil {
114 | 		fmt.Printf("Failed to delete index \"%s\" after 4 retries: %v\n", ts.idxName, err)
115 | 	}
116 | 
117 | 	fmt.Printf("\n %s suite torn down successfully\n", ts.indexType)
118 | }
119 | 
120 | // Helper funcs
121 | func generateTestIndexName() string {
122 | 	return fmt.Sprintf("index-%d", time.Now().UnixMilli())
123 | }
124 | 
125 | func upsertVectors(ts *IntegrationTests, ctx context.Context, vectors []*Vector) error {
126 | 	_, err := waitUntilIndexReady(ts, ctx)
127 | 	require.NoError(ts.T(), err)
128 | 
129 | 	ids := make([]string, len(vectors))
130 | 	for i, v := range vectors {
131 | 		ids[i] = v.Id
132 | 	}
133 | 
134 | 	upsertVectors, err := ts.idxConn.UpsertVectors(ctx, vectors)
135 | 	require.NoError(ts.T(), err)
136 | 	fmt.Printf("Upserted vectors: %v into host: %s\n", upsertVectors, ts.host)
137 | 
138 | 	ts.vectorIds = append(ts.vectorIds, ids...)
139 | 
140 | 	return nil
141 | }
142 | 
143 | func createCollection(ts *IntegrationTests, ctx context.Context) {
144 | 	name := uuid.New().String()
145 | 	sourceIndex := ts.idxName
146 | 
147 | 	ts.collectionName = name
148 | 
149 | 	collection, err := ts.client.CreateCollection(ctx, &CreateCollectionRequest{
150 | 		Name:   name,
151 | 		Source: sourceIndex,
152 | 	})
153 | 
154 | 	require.NoError(ts.T(), err)
155 | 	require.Equal(ts.T(), name, collection.Name)
156 | }
157 | 
158 | func waitUntilIndexReady(ts *IntegrationTests, ctx context.Context) (bool, error) {
159 | 	start := time.Now()
160 | 	delay := 5 * time.Second
161 | 	maxWaitTimeSeconds := 280 * time.Second
162 | 
163 | 	for {
164 | 		index, err := ts.client.DescribeIndex(ctx, ts.idxName)
165 | 		require.NoError(ts.T(), err)
166 | 
167 | 		if index.Status.Ready && index.Status.State == Ready {
168 | 			fmt.Printf("Index \"%s\" is ready after %f seconds\n", ts.idxName, time.Since(start).Seconds())
169 | 			return true, nil
170 | 		}
171 | 
172 | 		totalSeconds := time.Since(start)
173 | 
174 | 		if totalSeconds >= maxWaitTimeSeconds {
175 | 			return false, fmt.Errorf("Index \"%s\" not ready after %f seconds", ts.idxName, totalSeconds.Seconds())
176 | 		}
177 | 
178 | 		fmt.Printf("Index \"%s\" not ready yet, retrying... (%f/%f)\n", ts.idxName, totalSeconds.Seconds(), maxWaitTimeSeconds.Seconds())
179 | 		time.Sleep(delay)
180 | 	}
181 | }
182 | 
183 | func generateVectors(numOfVectors int, dimension int32, isSparse bool, metadata *Metadata) []*Vector {
184 | 	vectors := make([]*Vector, numOfVectors)
185 | 
186 | 	for i := 0; i < numOfVectors; i++ {
187 | 		vectors[i] = &Vector{
188 | 			Id: fmt.Sprintf("vector-%d", i),
189 | 		}
190 | 
191 | 		if isSparse {
192 | 			var sparseValues SparseValues
193 | 			for j := 0; j < int(dimension); j++ {
194 | 				sparseValues.Indices = append(sparseValues.Indices, uint32(j))
195 | 			}
196 | 			values := generateVectorValues(dimension)
197 | 			sparseValues.Values = *values
198 | 			vectors[i].SparseValues = &sparseValues
199 | 		} else {
200 | 			values := generateVectorValues(dimension)
201 | 			vectors[i].Values = values
202 | 		}
203 | 
204 | 		if metadata != nil {
205 | 			vectors[i].Metadata = metadata
206 | 		}
207 | 	}
208 | 
209 | 	return vectors
210 | }
211 | 
212 | func generateVectorValues(dimension int32) *[]float32 {
213 | 	maxInt := 1000000 // A large integer to normalize the float values
214 | 	values := make([]float32, dimension)
215 | 
216 | 	for i := int32(0); i < dimension; i++ {
217 | 		// Generate a random integer and normalize it to the range [0, 1)
218 | 		values[i] = float32(rand.Intn(maxInt)) / float32(maxInt)
219 | 	}
220 | 
221 | 	return &values
222 | }
223 | 
224 | func buildServerlessTestIndex(in *Client, idxName string, tags IndexTags) *Index {
225 | 	ctx := context.Background()
226 | 	dimension := int32(setDimensionsForTestIndexes())
227 | 	metric := Cosine
228 | 
229 | 	fmt.Printf("Creating Serverless index: %s\n", idxName)
230 | 	serverlessIdx, err := in.CreateServerlessIndex(ctx, &CreateServerlessIndexRequest{
231 | 		Name:      idxName,
232 | 		Dimension: &dimension,
233 | 		Metric:    &metric,
234 | 		Region:    "us-east-1",
235 | 		Cloud:     "aws",
236 | 		Tags:      &tags,
237 | 	})
238 | 	if err != nil {
239 | 		log.Fatalf("Failed to create Serverless index \"%s\" in integration test: %v", idxName, err)
240 | 	} else {
241 | 		fmt.Printf("Successfully created a new Serverless index: %s!\n", idxName)
242 | 	}
243 | 	return serverlessIdx
244 | }
245 | 
246 | func buildPodTestIndex(in *Client, name string, tags IndexTags) *Index {
247 | 	ctx := context.Background()
248 | 	metric := Cosine
249 | 
250 | 	fmt.Printf("Creating pod index: %s\n", name)
251 | 	podIdx, err := in.CreatePodIndex(ctx, &CreatePodIndexRequest{
252 | 		Name:        name,
253 | 		Dimension:   int32(setDimensionsForTestIndexes()),
254 | 		Metric:      &metric,
255 | 		Environment: "us-east-1-aws",
256 | 		PodType:     "p1",
257 | 		Tags:        &tags,
258 | 	})
259 | 	if err != nil {
260 | 		log.Fatalf("Failed to create pod index in buildPodTestIndex test: %v", err)
261 | 	} else {
262 | 		fmt.Printf("Successfully created a new pod index: %s!\n", name)
263 | 	}
264 | 	return podIdx
265 | }
266 | 
267 | func retryAssertions(t *testing.T, maxRetries int, delay time.Duration, fn func() error) {
268 | 	for attempt := 1; attempt <= maxRetries; attempt++ {
269 | 		// function call succeeded, so we return
270 | 		if err := fn(); err == nil {
271 | 			return
272 | 		} else if attempt < maxRetries {
273 | 			t.Logf("Attempt %d/%d failed: %+v. Retrying in %f...", attempt, maxRetries, err, delay.Seconds())
274 | 			time.Sleep(delay)
275 | 		} else {
276 | 			t.Fatalf("Test failed after %d attempts: %+v", maxRetries, err)
277 | 		}
278 | 	}
279 | }
280 | 
281 | func retryAssertionsWithDefaults(t *testing.T, fn func() error) {
282 | 	retryAssertions(t, 30, 5*time.Second, fn)
283 | }
284 | 
285 | func pollIndexForFreshness(ts *IntegrationTests, ctx context.Context, sampleId string) error {
286 | 	maxSleep := 240 * time.Second
287 | 	delay := 5 * time.Second
288 | 	totalWait := 0 * time.Second
289 | 
290 | 	fetchResp, _ := ts.idxConn.FetchVectors(ctx, []string{sampleId})
291 | 	queryResp, _ := ts.idxConn.QueryByVectorId(ctx, &QueryByVectorIdRequest{VectorId: sampleId, TopK: 1})
292 | 	for (fetchResp == nil || len(fetchResp.Vectors) == 0) && (queryResp == nil || len(queryResp.Matches) == 0) { // errors are ignored above, so guard against nil responses
293 | 		if totalWait >= maxSleep {
294 | 			return fmt.Errorf("timed out waiting for vector freshness")
295 | 		}
296 | 		fmt.Printf("Vector not fresh for id: %s, waiting %+v seconds...\n", sampleId, delay.Seconds())
297 | 		time.Sleep(delay)
298 | 		totalWait += delay
299 | 
300 | 		fetchResp, _ = ts.idxConn.FetchVectors(ctx, []string{sampleId})
301 | 		queryResp, _ = ts.idxConn.QueryByVectorId(ctx, &QueryByVectorIdRequest{VectorId: sampleId, TopK: 1})
302 | 	}
303 | 	return nil
304 | }
305 | 
306 | func setDimensionsForTestIndexes() uint32 {
307 | 	return uint32(5)
308 | }
309 | --------------------------------------------------------------------------------
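
The search-records types in pinecone/models.go (SearchRecordsRequest, SearchRecordsQuery, SearchRecordsRerank) are plain JSON-tagged structs, so a request body can be sanity-checked with encoding/json alone, in the same spirit as the marshaling tests in pinecone/models_test.go. A minimal sketch, not part of the repository: the helper name, model name, and field values are all made up for illustration.

package pinecone

import (
	"encoding/json"
	"fmt"
)

// buildSearchRequestSketch is a hypothetical helper (not in the SDK) showing
// how the JSON-tagged search types compose into a single request body.
func buildSearchRequestSketch() string {
	topN := int32(5)
	rerankQuery := "disco recordings from the 1970s"
	req := SearchRecordsRequest{
		Query: SearchRecordsQuery{
			TopK: 10,
			// For an integrated-embedding index, the query text goes in Inputs.
			Inputs: &map[string]interface{}{"text": rerankQuery},
		},
		Fields: &[]string{"chunk_text"},
		Rerank: &SearchRecordsRerank{
			Model:      "bge-reranker-v2-m3", // assumed model name; see the reranking-models guide linked above
			RankFields: []string{"chunk_text"},
			TopN:       &topN,
			Query:      &rerankQuery,
		},
	}
	b, err := json.Marshal(req)
	if err != nil {
		return fmt.Sprintf("marshal failed: %v", err)
	}
	return string(b)
}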
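
Going the other way, SearchRecordsResponse, Hit, and SearchUsage decode directly from the documented wire format ("_id", "_score", "fields", "read_units"). Another illustrative sketch under the same caveats; the response payload is invented.

package pinecone

import "encoding/json"

// decodeHitsSketch is a hypothetical helper (not in the SDK) showing how a
// raw search response maps onto SearchRecordsResponse and its nested Hit list.
func decodeHitsSketch() ([]Hit, int32, error) {
	raw := []byte(`{
		"result": {"hits": [{"_id": "rec-1", "_score": 0.93, "fields": {"chunk_text": "..."}}]},
		"usage": {"read_units": 6, "rerank_units": 1}
	}`)
	var resp SearchRecordsResponse
	if err := json.Unmarshal(raw, &resp); err != nil {
		return nil, 0, err
	}
	// Hits arrive ordered by _score; usage reports the read units consumed.
	return resp.Result.Hits, resp.Usage.ReadUnits, nil
}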
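
ImportStatus is a plain string enum, and callers polling an Import generally only need to distinguish terminal from in-flight states. A minimal sketch using only the constants declared in pinecone/models.go; the helper itself is hypothetical.

package pinecone

// isImportTerminal is a hypothetical helper reporting whether an Import has
// reached a terminal ImportStatus, i.e. will make no further progress.
func isImportTerminal(s ImportStatus) bool {
	switch s {
	case Completed, Failed, Cancelled:
		return true
	default: // Pending, InProgress
		return false
	}
}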
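
Finally, retryAssertions and retryAssertionsWithDefaults in pinecone/test_suite.go expect a closure that returns nil once its assertions hold, which is how eventually-consistent checks get retried. A hedged usage sketch: the function name, fetched ID, and expectation below are assumptions, not repository code.

package pinecone

import (
	"context"
	"fmt"
	"testing"
)

// verifyVectorEventually is a hypothetical example of driving
// retryAssertionsWithDefaults: the closure returns an error to trigger
// another attempt and nil to stop retrying.
func verifyVectorEventually(t *testing.T, ctx context.Context, idxConn *IndexConnection) {
	retryAssertionsWithDefaults(t, func() error {
		resp, err := idxConn.FetchVectors(ctx, []string{"vector-0"})
		if err != nil {
			return err
		}
		if len(resp.Vectors) != 1 {
			return fmt.Errorf("expected 1 vector, got %d", len(resp.Vectors))
		}
		return nil
	})
}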