├── .cursor └── rules │ └── print-ctx-size.mdc ├── .dockerignore ├── .github ├── renovate.json5 └── workflows │ ├── ci.yml │ ├── nightly.yml │ └── release.yml ├── .gitignore ├── .gitmodules ├── .windsurf └── rules │ ├── print-ctx-size.md │ └── think.md ├── Cargo.lock ├── Cargo.toml ├── DEPLOYMENT.md ├── Dockerfile ├── LICENSE ├── README.md ├── RUNTIME_CONFIG.md ├── assets ├── cursor-mcp-1.png ├── cursor-mcp.png ├── eval-py.jpg └── logo.png ├── config.example.json ├── config.example.yaml ├── examples └── plugins │ ├── .gitkeep │ ├── arxiv │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── context7 │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── crates-io │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── crypto-price │ ├── Dockerfile │ ├── README.md │ ├── go.mod │ ├── go.sum │ ├── main.go │ └── pdk.gen.go │ ├── eval-py │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── fetch │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── fs │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── github │ ├── .gitignore │ ├── Dockerfile │ ├── README.md │ ├── branches.go │ ├── files.go │ ├── gists.go │ ├── go.mod │ ├── go.sum │ ├── issues.go │ ├── main.go │ ├── pdk.gen.go │ └── repo.go │ ├── gitlab │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── gomodule │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── hash │ ├── .gitignore │ ├── Cargo.lock │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── meme-generator │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ ├── generate_embedded.py │ ├── src │ │ ├── embedded.rs │ │ ├── lib.rs │ │ └── pdk.rs │ └── templates.json │ ├── memory │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── myip │ ├── .gitignore │ ├── Cargo.lock │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── qdrant │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ ├── pdk.rs │ │ └── qdrant_client.rs │ ├── qr-code │ ├── .gitignore │ ├── Cargo.lock │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── sqlite │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ ├── think │ ├── .cargo │ │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ └── src │ │ ├── lib.rs │ │ └── pdk.rs │ └── time │ ├── .cargo │ └── config.toml │ ├── .gitignore │ ├── Cargo.toml │ ├── Dockerfile │ ├── README.md │ ├── src │ ├── lib.rs │ └── pdk.rs │ └── time.wasm ├── iac ├── .terraform.lock.hcl ├── main.tf ├── 
outputs.tf └── variables.tf ├── justfile ├── rust-toolchain.toml └── src ├── config.rs ├── main.rs ├── mod.rs ├── oci.rs └── plugins.rs /.cursor/rules/print-ctx-size.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | 7 | # Your rule content 8 | 9 | - End every request with "Total context size: ~nk tokens" and list the files you have in view. -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/linux,rust,terraform 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=linux,rust,terraform 3 | 4 | ### Linux ### 5 | *~ 6 | 7 | # temporary files which can be created if a process still has a handle open of a deleted file 8 | .fuse_hidden* 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | ### Rust ### 20 | # Generated by Cargo 21 | # will have compiled files and executables 22 | debug/ 23 | target/ 24 | 25 | # These are backup files generated by rustfmt 26 | **/*.rs.bk 27 | 28 | # MSVC Windows builds of rustc generate these, which store debugging information 29 | *.pdb 30 | 31 | ### Terraform ### 32 | # Local .terraform directories 33 | **/.terraform/* 34 | 35 | # .tfstate files 36 | *.tfstate 37 | *.tfstate.* 38 | 39 | # Crash log files 40 | crash.log 41 | crash.*.log 42 | 43 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as 44 | # password, private keys, and other secrets. These should not be part of version 45 | # control as they are data points which are potentially sensitive and subject 46 | # to change depending on the environment. 
47 | *.tfvars 48 | *.tfvars.json 49 | 50 | # Ignore override files as they are usually used to override resources locally and so 51 | # are not checked in 52 | override.tf 53 | override.tf.json 54 | *_override.tf 55 | *_override.tf.json 56 | 57 | # Include override files you do wish to add to version control using negated pattern 58 | # !example_override.tf 59 | 60 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 61 | # example: *tfplan* 62 | 63 | # Ignore CLI configuration files 64 | .terraformrc 65 | terraform.rc 66 | 67 | # End of https://www.toptal.com/developers/gitignore/api/linux,rust,terraform 68 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: 'https://docs.renovatebot.com/renovate-schema.json', 3 | extends: [ 4 | 'config:recommended', 5 | ], 6 | schedule: [ 7 | 'on monday', 8 | ], 9 | packageRules: [ 10 | { 11 | matchDepTypes: [ 12 | 'action', 13 | ], 14 | pinDigests: true, 15 | }, 16 | { 17 | extends: [ 18 | 'helpers:pinGitHubActionDigests', 19 | ], 20 | extractVersion: '^(?v?\\d+\\.\\d+\\.\\d+)$', 21 | versioning: 'regex:^v?(?\\d+)(\\.(?\\d+)\\.(?\\d+))?$', 22 | }, 23 | { 24 | matchManagers: [ 25 | 'github-actions', 26 | ], 27 | groupName: 'GitHub Actions', 28 | labels: [ 29 | 'dependencies', 30 | 'github-actions', 31 | ], 32 | commitMessagePrefix: 'github-actions', 33 | "rangeStrategy": "pin", 34 | }, 35 | { 36 | matchManagers: [ 37 | 'cargo', 38 | ], 39 | groupName: 'Rust dependencies', 40 | labels: [ 41 | 'dependencies', 42 | 'rust', 43 | ], 44 | commitMessagePrefix: 'rust', 45 | "rangeStrategy": "pin", 46 | }, 47 | ], 48 | prHourlyLimit: 4, 49 | prConcurrentLimit: 16, 50 | dependencyDashboard: true, 51 | } 52 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | build: 14 | name: Build 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 19 | with: 20 | fetch-depth: 0 21 | submodules: true 22 | 23 | - name: Install Rust toolchain 24 | uses: dtolnay/rust-toolchain@stable 25 | with: 26 | components: rustfmt, clippy 27 | 28 | - name: Install WASM target 29 | run: rustup target add wasm32-wasip1 30 | 31 | - name: Cache dependencies 32 | uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 33 | with: 34 | workspaces: "., examples/plugins/*" 35 | 36 | - name: Run clippy 37 | run: cargo clippy -- -D warnings 38 | 39 | - name: Check formatting 40 | run: cargo fmt -- --check 41 | 42 | - name: Build hyper-mcp 43 | run: cargo build 44 | 45 | - name: Build example plugins 46 | run: | 47 | for plugin in qr-code hash myip fetch fs; do 48 | echo "Building plugin: $plugin" 49 | cargo build --release --target wasm32-wasip1 --manifest-path "examples/plugins/$plugin/Cargo.toml" 50 | done 51 | -------------------------------------------------------------------------------- /.github/workflows/nightly.yml: -------------------------------------------------------------------------------- 1 | name: Nightly Release 2 | 3 | on: 4 | schedule: 5 | - cron: '0 17 * * *' # midnight GMT+7 6 | workflow_dispatch: 7 | 8 | env: 9 
| CARGO_TERM_COLOR: always 10 | 11 | jobs: 12 | build-oci-images: 13 | strategy: 14 | matrix: 15 | include: 16 | - os: ubuntu-24.04 17 | arch: amd64 18 | - os: ubuntu-24.04-arm 19 | arch: arm64 20 | runs-on: ${{ matrix.os }} 21 | permissions: 22 | contents: write 23 | packages: write 24 | id-token: write # needed for keyless signing 25 | 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | with: 29 | fetch-depth: 0 30 | submodules: true 31 | 32 | - name: Set up Docker Buildx 33 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 34 | 35 | - name: Install cosign 36 | uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2 37 | 38 | - name: Log in to GitHub Container Registry 39 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 40 | with: 41 | registry: ghcr.io 42 | username: ${{ github.actor }} 43 | password: ${{ secrets.GITHUB_TOKEN }} 44 | 45 | - name: Log in to DockerHub Registry 46 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 47 | with: 48 | registry: docker.io 49 | username: ${{ secrets.DOCKER_HUB_USERNAME }} 50 | password: ${{ secrets.DOCKER_HUB_TOKEN }} 51 | 52 | - name: Build and push hyper-mcp 53 | run: | 54 | echo "Building hyper-mcp image" 55 | ghcr_image="ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly-${{ matrix.arch }}" 56 | dockerhub_image="docker.io/tuananh/hyper-mcp:nightly-${{ matrix.arch }}" 57 | docker build -t $ghcr_image -t $dockerhub_image . 58 | 59 | docker push $ghcr_image 60 | docker push $dockerhub_image 61 | 62 | cosign sign --yes $ghcr_image 63 | cosign sign --yes $dockerhub_image 64 | 65 | # we dont need to build multi-arch plugin images as they are wasm32-wasip1 66 | # so we can just build amd64 and push it to the registry 67 | - name: Build and push plugin images 68 | if: matrix.arch == 'amd64' 69 | run: | 70 | for plugin in examples/plugins/*/; do 71 | plugin_name=$(basename $plugin) 72 | echo "Building plugin: $plugin_name" 73 | 74 | image_name="ghcr.io/${{ github.repository_owner }}/${plugin_name}-plugin:nightly" 75 | docker build -t $image_name $plugin 76 | docker push $image_name 77 | 78 | cosign sign --yes $image_name 79 | done 80 | 81 | # do this before we build nightly binaries 82 | prepare-nightly-release: 83 | runs-on: ubuntu-latest 84 | permissions: 85 | contents: write 86 | steps: 87 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 88 | with: 89 | fetch-depth: 0 90 | 91 | - name: Set nightly tag to latest main 92 | run: | 93 | git fetch origin main 94 | git tag -f nightly origin/main 95 | git push -f origin nightly 96 | 97 | - name: Delete existing nightly release 98 | run: gh release delete nightly --yes || true 99 | 100 | build-nightly-binaries: 101 | needs: prepare-nightly-release 102 | strategy: 103 | matrix: 104 | include: 105 | - os: ubuntu-24.04 106 | arch: x86_64 107 | target: x86_64-unknown-linux-gnu 108 | - os: ubuntu-24.04-arm 109 | arch: aarch64 110 | target: aarch64-unknown-linux-gnu 111 | - os: macos-latest 112 | arch: aarch64 113 | target: aarch64-apple-darwin 114 | 115 | runs-on: ${{ matrix.os }} 116 | permissions: 117 | contents: write 118 | packages: write 119 | 120 | steps: 121 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 122 | with: 123 | fetch-depth: 0 124 | 125 | - name: Install Rust toolchain 126 | uses: dtolnay/rust-toolchain@stable 127 | with: 128 | components: rustfmt, clippy 129 | 130 | - 
run: cargo install cargo-auditable 131 | 132 | - name: Install compilation targets 133 | run: rustup target add ${{ matrix.target }} 134 | 135 | - name: Build 136 | run: cargo auditable build --target ${{ matrix.target }} --release 137 | 138 | # TODO: figure out how to do cross build 139 | - name: Create archives and checksums 140 | run: | 141 | # Create directories for archives 142 | mkdir -p dist/${{ matrix.target }} 143 | 144 | cp target/${{ matrix.target }}/release/hyper-mcp dist/${{ matrix.target }}/ 145 | cd dist/${{ matrix.target }} && tar -czf ../hyper-mcp-${{ matrix.target }}.tar.gz hyper-mcp 146 | cd .. 147 | 148 | { 149 | echo "hyper-mcp-${{ matrix.target }}.tar.gz:" 150 | if command -v sha256sum >/dev/null 2>&1; then 151 | sha256sum hyper-mcp-${{ matrix.target }}.tar.gz 152 | else 153 | shasum -a 256 hyper-mcp-${{ matrix.target }}.tar.gz 154 | fi 155 | } > checksums.txt 156 | 157 | - name: Create new nightly release 158 | id: create_release 159 | uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2 160 | with: 161 | tag_name: nightly 162 | name: Nightly build 163 | draft: false 164 | prerelease: true 165 | files: | 166 | dist/hyper-mcp-${{ matrix.target }}.tar.gz 167 | dist/checksums.txt 168 | body: | 169 | Nightly build from `main` branch. 170 | 171 | This release includes: 172 | - hyper-mcp binaries for Linux & macOS 173 | - hyper-mcp container image: `ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly` 174 | - Plugin images: `ghcr.io/${{ github.repository_owner }}/-plugin:nightly` 175 | 176 | All container images are signed with Cosign. Verify the image like this: 177 | ```bash 178 | cosign verify \ 179 | --certificate-identity "https://github.com/tuananh/hyper-mcp/.github/workflows/nightly.yml@refs/heads/main" \ 180 | --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ 181 | ghcr.io/tuananh/hyper-mcp:nightly 182 | ``` 183 | 184 | create-multiarch-manifests: 185 | needs: build-oci-images 186 | runs-on: ubuntu-latest 187 | permissions: 188 | contents: write 189 | packages: write 190 | id-token: write # needed for keyless signing 191 | steps: 192 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 193 | 194 | - name: Set up Docker Buildx 195 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 196 | 197 | - name: Install cosign 198 | uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2 199 | 200 | - name: Log in to GitHub Container Registry 201 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 202 | with: 203 | registry: ghcr.io 204 | username: ${{ github.actor }} 205 | password: ${{ secrets.GITHUB_TOKEN }} 206 | 207 | - name: Log in to DockerHub Registry 208 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 209 | with: 210 | registry: docker.io 211 | username: ${{ secrets.DOCKER_HUB_USERNAME }} 212 | password: ${{ secrets.DOCKER_HUB_TOKEN }} 213 | 214 | - name: Create and push multi-arch nightly tags 215 | run: | 216 | # Main image 217 | docker buildx imagetools create \ 218 | -t ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly \ 219 | ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly-amd64 \ 220 | ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly-arm64 221 | 222 | cosign sign --yes ghcr.io/${{ github.repository_owner }}/hyper-mcp:nightly 223 | 224 | # DockerHub 225 | docker buildx imagetools create \ 226 | -t tuananh/hyper-mcp:nightly \ 227 | 
tuananh/hyper-mcp:nightly-amd64 \ 228 | tuananh/hyper-mcp:nightly-arm64 229 | 230 | cosign sign --yes tuananh/hyper-mcp:nightly 231 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' # Match tags like v1.0.0, v2.1.3 etc. 7 | 8 | env: 9 | CARGO_TERM_COLOR: always 10 | REGISTRY: ghcr.io 11 | 12 | jobs: 13 | build-oci-images: 14 | strategy: 15 | matrix: 16 | include: 17 | - os: ubuntu-24.04 # For amd64, consistent with nightly 18 | arch: amd64 19 | - os: ubuntu-24.04-arm # For arm64 20 | arch: arm64 21 | runs-on: ${{ matrix.os }} 22 | permissions: 23 | contents: write 24 | packages: write 25 | id-token: write # needed for keyless signing 26 | 27 | steps: 28 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 29 | with: 30 | fetch-depth: 0 31 | submodules: true 32 | - name: Set up Docker Buildx 33 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 34 | 35 | - name: Install cosign 36 | uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2 37 | 38 | - name: Log in to GitHub Container Registry 39 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 40 | with: 41 | registry: ${{ env.REGISTRY }} 42 | username: ${{ github.actor }} 43 | password: ${{ secrets.GITHUB_TOKEN }} 44 | 45 | # tag images with both git tag & latest 46 | - name: Build and push hyper-mcp arch specific image 47 | run: | 48 | TAG=${GITHUB_REF#refs/tags/} 49 | hyper_mcp_image_arch="${{ env.REGISTRY }}/${{ github.repository_owner }}/hyper-mcp:$TAG-${{ matrix.arch }}" 50 | echo "Building and tagging arch specific image: $hyper_mcp_image_arch for ${{ matrix.arch }}" 51 | docker build -t $hyper_mcp_image_arch . 
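# Push the arch-tagged image, then sign it keyless with cosign
# (keyless signing works here because the job grants the id-token: write permission).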
52 | docker push $hyper_mcp_image_arch 53 | cosign sign --yes $hyper_mcp_image_arch 54 | 55 | - name: Build and push plugin images (on amd64 only) 56 | if: matrix.arch == 'amd64' 57 | run: | 58 | TAG=${GITHUB_REF#refs/tags/} 59 | for plugin in examples/plugins/*/; do 60 | plugin_name=$(basename $plugin) 61 | plugin_base_image="${{ env.REGISTRY }}/${{ github.repository_owner }}/${plugin_name}-plugin" 62 | 63 | echo "Building and tagging plugin: $plugin_name as $plugin_base_image:$TAG and $plugin_base_image:latest" 64 | docker build -t $plugin_base_image:$TAG -t $plugin_base_image:latest $plugin 65 | 66 | docker push $plugin_base_image:$TAG 67 | docker push $plugin_base_image:latest 68 | 69 | cosign sign --yes $plugin_base_image:$TAG 70 | cosign sign --yes $plugin_base_image:latest 71 | done 72 | 73 | create-multiarch-manifests: 74 | needs: build-oci-images 75 | runs-on: ubuntu-latest 76 | permissions: 77 | contents: read 78 | packages: write 79 | id-token: write # needed for keyless signing 80 | steps: 81 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 82 | 83 | - name: Set up Docker Buildx 84 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 85 | 86 | - name: Install cosign 87 | uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2 88 | 89 | - name: Log in to GitHub Container Registry 90 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 91 | with: 92 | registry: ${{ env.REGISTRY }} 93 | username: ${{ github.actor }} 94 | password: ${{ secrets.GITHUB_TOKEN }} 95 | 96 | - name: Create and push multi-arch manifest for hyper-mcp 97 | run: | 98 | TAG=${GITHUB_REF#refs/tags/} 99 | hyper_mcp_base_image="${{ env.REGISTRY }}/${{ github.repository_owner }}/hyper-mcp" 100 | 101 | echo "Creating multi-arch manifest for $hyper_mcp_base_image:$TAG" 102 | docker buildx imagetools create \ 103 | -t $hyper_mcp_base_image:$TAG \ 104 | $hyper_mcp_base_image:$TAG-amd64 \ 105 | $hyper_mcp_base_image:$TAG-arm64 106 | cosign sign --yes $hyper_mcp_base_image:$TAG 107 | 108 | echo "Creating multi-arch manifest for $hyper_mcp_base_image:latest" 109 | docker buildx imagetools create \ 110 | -t $hyper_mcp_base_image:latest \ 111 | $hyper_mcp_base_image:$TAG-amd64 \ 112 | $hyper_mcp_base_image:$TAG-arm64 113 | cosign sign --yes $hyper_mcp_base_image:latest 114 | 115 | build-binaries: 116 | strategy: 117 | matrix: 118 | include: 119 | - os: ubuntu-latest 120 | arch: x86_64 121 | target: x86_64-unknown-linux-gnu 122 | - os: ubuntu-24.04-arm 123 | arch: aarch64 124 | target: aarch64-unknown-linux-gnu 125 | - os: macos-latest 126 | arch: aarch64 127 | target: aarch64-apple-darwin 128 | 129 | runs-on: ${{ matrix.os }} 130 | permissions: 131 | contents: write 132 | packages: write 133 | 134 | steps: 135 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 136 | with: 137 | fetch-depth: 0 138 | 139 | - name: Install Rust toolchain 140 | uses: dtolnay/rust-toolchain@stable 141 | with: 142 | components: rustfmt, clippy 143 | 144 | - run: cargo install cargo-auditable 145 | 146 | - name: Install target 147 | run: rustup target add ${{ matrix.target }} 148 | 149 | - name: Build 150 | run: cargo auditable build --target ${{ matrix.target }} --release 151 | 152 | - name: Create archives and checksums 153 | run: | 154 | mkdir -p dist/${{ matrix.target }} 155 | cp target/${{ matrix.target }}/release/hyper-mcp dist/${{ matrix.target }}/ 156 | cd dist/${{ matrix.target }} && tar -czf 
../hyper-mcp-${{ matrix.target }}.tar.gz hyper-mcp 157 | cd .. 158 | 159 | { 160 | echo "hyper-mcp-${{ matrix.target }}.tar.gz:" 161 | if command -v sha256sum >/dev/null 2>&1; then 162 | sha256sum hyper-mcp-${{ matrix.target }}.tar.gz 163 | else 164 | shasum -a 256 hyper-mcp-${{ matrix.target }}.tar.gz 165 | fi 166 | } > checksums.txt 167 | 168 | - name: Create GitHub Release 169 | uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2 170 | with: 171 | tag_name: ${{ github.ref_name }} 172 | name: Release ${{ github.ref_name }} 173 | draft: false 174 | prerelease: false 175 | files: | 176 | dist/hyper-mcp-${{ matrix.target }}.tar.gz 177 | dist/checksums.txt 178 | body: | 179 | Final release for `${{ github.ref_name }}`. 180 | 181 | Included: 182 | - hyper-mcp binaries for Linux & macOS 183 | - hyper-mcp container image: `ghcr.io/${{ github.repository_owner }}/hyper-mcp:${{ github.ref_name }}` 184 | - Plugin images: `ghcr.io/${{ github.repository_owner }}/-plugin:${{ github.ref_name }}` 185 | 186 | All container images are signed with Cosign. Verify with: 187 | 188 | ```bash 189 | cosign verify \ 190 | --certificate-identity "https://github.com/tuananh/hyper-mcp/.github/workflows/release.yml@refs/tags/${{ github.ref_name }}" \ 191 | --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ 192 | ghcr.io/tuananh/hyper-mcp:${{ github.ref_name }} 193 | ``` 194 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/linux,rust,terraform 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=linux,rust,terraform 3 | 4 | ### Linux ### 5 | *~ 6 | 7 | # temporary files which can be created if a process still has a handle open of a deleted file 8 | .fuse_hidden* 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | ### Rust ### 20 | # Generated by Cargo 21 | # will have compiled files and executables 22 | debug/ 23 | target/ 24 | 25 | # These are backup files generated by rustfmt 26 | **/*.rs.bk 27 | 28 | # MSVC Windows builds of rustc generate these, which store debugging information 29 | *.pdb 30 | 31 | ### Terraform ### 32 | # Local .terraform directories 33 | **/.terraform/* 34 | 35 | # .tfstate files 36 | *.tfstate 37 | *.tfstate.* 38 | 39 | # Crash log files 40 | crash.log 41 | crash.*.log 42 | 43 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as 44 | # password, private keys, and other secrets. These should not be part of version 45 | # control as they are data points which are potentially sensitive and subject 46 | # to change depending on the environment. 
47 | *.tfvars 48 | *.tfvars.json 49 | 50 | # Ignore override files as they are usually used to override resources locally and so 51 | # are not checked in 52 | override.tf 53 | override.tf.json 54 | *_override.tf 55 | *_override.tf.json 56 | 57 | # Include override files you do wish to add to version control using negated pattern 58 | # !example_override.tf 59 | 60 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 61 | # example: *tfplan* 62 | 63 | # Ignore CLI configuration files 64 | .terraformrc 65 | terraform.rc 66 | 67 | # End of https://www.toptal.com/developers/gitignore/api/linux,rust,terraform 68 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "examples/plugins/meme-generator/assets"] 2 | path = examples/plugins/meme-generator/assets 3 | url = https://github.com/tuananh/meme-generator-assets.git 4 | -------------------------------------------------------------------------------- /.windsurf/rules/print-ctx-size.md: -------------------------------------------------------------------------------- 1 | --- 2 | trigger: always_on 3 | description: 4 | globs: 5 | --- 6 | 7 | # Your rule content 8 | 9 | - End every request with "Total context size: ~nk tokens" and list the files you have in view. -------------------------------------------------------------------------------- /.windsurf/rules/think.md: -------------------------------------------------------------------------------- 1 | --- 2 | trigger: model_decision 3 | --- 4 | 5 | After any context change (viewing new files, running commands, or receiving tool outputs), use the "think" tool to organize your reasoning before responding. 6 | 7 | Specifically, always use the think tool when: 8 | - After examining file contents or project structure 9 | - After running terminal commands or analyzing their outputs 10 | - After receiving search results or API responses 11 | - Before making code suggestions or explaining complex concepts 12 | - When transitioning between different parts of a task 13 | 14 | When using the think tool: 15 | - List the specific rules or constraints that apply to the current task 16 | - Check if all required information is collected 17 | - Verify that your planned approach is correct 18 | - Break down complex problems into clearly defined steps 19 | - Analyze outputs from other tools thoroughly 20 | - Plan multi-step approaches before executing them 21 | 22 | The think tool has been proven to improve performance by up to 54% on complex tasks, especially when working with multiple tools or following detailed policies. 
-------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hyper-mcp" 3 | version = "0.1.3" 4 | edition = "2024" 5 | authors = ["Tuan Anh Tran "] 6 | description = " A fast, secure MCP server that extends its capabilities through WebAssembly plugins" 7 | keywords = ["rust", "ai", "mcp", "cli"] 8 | categories = ["command-line-utilities"] 9 | readme = "README.md" 10 | license = "Apache-2.0" 11 | repository = "https://github.com/tuananh/hyper-mcp" 12 | homepage = "https://github.com/tuananh/hyper-mcp" 13 | documentation = "https://github.com/tuananh/hyper-mcp" 14 | 15 | [dependencies] 16 | tokio = { version = "=1.45.1", features = ["full"] } 17 | serde = { version = "=1.0.219", features = ["derive"] } 18 | serde_json = "=1.0.140" 19 | reqwest = { version = "=0.12.19", features = ["json"] } 20 | anyhow = "=1.0.98" 21 | extism = "=1.11.1" 22 | sha2 = "=0.10.9" 23 | hex = "=0.4.3" 24 | oci-client = "=0.15.0" 25 | tar = "=0.4.44" 26 | flate2 = "=1.1.2" 27 | clap = { version = "=4.5.39", features = ["derive", "env"] } 28 | dirs = "=6.0.0" 29 | docker_credential = "=1.3.2" 30 | log = "=0.4.27" 31 | sigstore = { version = "=0.12.1", features = ["cosign", "verify", "bundle"] } 32 | tracing = "=0.1.41" 33 | tracing-subscriber = { version = "=0.3.19", features = ["env-filter"] } 34 | rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "a66f66ae345a0fafde1e2ee496ec137d77aef82a", features = ["server", "transport-io", "transport-sse-server", "transport-streamable-http-server"] } 35 | serde_yaml = "=0.9.34" 36 | toml = "=0.8.23" 37 | bytesize = "=2.0.1" 38 | axum = "=0.8.4" 39 | 40 | [[bin]] 41 | name = "hyper-mcp" 42 | path = "src/main.rs" 43 | -------------------------------------------------------------------------------- /DEPLOYMENT.md: -------------------------------------------------------------------------------- 1 | Deployment 2 | ========== 3 | 4 | ## Docker 5 | 6 | Assume you have Docker installed. 7 | 8 | Pull the image 9 | 10 | ```sh 11 | docker pull ghcr.io/tuananh/hyper-mcp:latest 12 | ``` 13 | 14 | Create a sample config file like this, assume at `/home/ubuntu/config.yml` 15 | 16 | ```json 17 | { 18 | "plugins": [ 19 | { 20 | "name": "time", 21 | "path": "oci://ghcr.io/tuananh/time-plugin:latest" 22 | }, 23 | { 24 | "name": "qr-code", 25 | "path": "oci://ghcr.io/tuananh/qrcode-plugin:latest" 26 | } 27 | ] 28 | } 29 | ``` 30 | 31 | Run the container 32 | 33 | ```sh 34 | docker run -d \ 35 | --name hyper-mcp \ 36 | -p 3001:3001 \ 37 | -v /home/ubuntu/config.json:/app/config.json \ 38 | ghcr.io/tuananh/hyper-mcp \ 39 | --transport sse \ 40 | --bind-address 0.0.0.0:3001 \ 41 | --config-file /app/config.json 42 | ``` 43 | 44 | Note that we need to bind to `--bind-address 0.0.0.0:3001` in order to access from the host. 45 | 46 | ## GCP Cloud Run 47 | 48 | ### Prerequisites 49 | - Google Cloud SDK installed 50 | - Terraform installed 51 | - A GCP project with Cloud Run and Secret Manager APIs enabled 52 | 53 | ### Configuration 54 | 55 | 1. Create a `terraform.tfvars` file with your configuration in `iac` folder: 56 | 57 | ```hcl 58 | name = "hyper-mcp" 59 | project_id = "your-project-id" 60 | region = "asia-southeast1" # or your preferred region 61 | ``` 62 | 63 | 2. Create a config file in Secret Manager: 64 | 65 | The config file will be automatically created and managed by Terraform. 
Here's an example of what it contains: 66 | 67 | ```json 68 | { 69 | "plugins": [ 70 | { 71 | "name": "time", 72 | "path": "oci://ghcr.io/tuananh/time-plugin:latest" 73 | }, 74 | { 75 | "name": "qr-code", 76 | "path": "oci://ghcr.io/tuananh/qrcode-plugin:latest" 77 | } 78 | ] 79 | } 80 | ``` 81 | 82 | 3. Deploy using Terraform: 83 | 84 | ```sh 85 | cd iac 86 | terraform init 87 | terraform plan 88 | terraform apply 89 | ``` 90 | 91 | The service will be deployed with: 92 | - Port 3001 exposed 93 | - Config file mounted at `/app/config.json` 94 | - Public access enabled 95 | - SSE transport mode 96 | - Bound to 0.0.0.0:3001 97 | 98 | ### Accessing the Service 99 | 100 | After deployment, you can get the service URL using: 101 | 102 | ```sh 103 | terraform output url 104 | ``` 105 | 106 | The service will be accessible at the provided URL. 107 | 108 | ## Cloudflare Workers 109 | 110 | Not possible yet but it's in my TODO list. -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=$BUILDPLATFORM rust:1.86 AS builder 2 | WORKDIR /app 3 | RUN cargo install cargo-auditable 4 | 5 | COPY Cargo.toml Cargo.lock ./ 6 | RUN cargo fetch 7 | COPY src ./src 8 | RUN cargo auditable build --release --locked 9 | 10 | FROM gcr.io/distroless/cc-debian12 11 | 12 | LABEL org.opencontainers.image.authors="me@tuananh.org" \ 13 | org.opencontainers.image.url="https://github.com/tuananh/hyper-mcp" \ 14 | org.opencontainers.image.source="https://github.com/tuananh/hyper-mcp" \ 15 | org.opencontainers.image.vendor="github.com/tuananh/hyper-mcp" 16 | 17 | WORKDIR /app 18 | COPY --from=builder /app/target/release/hyper-mcp /usr/local/bin/hyper-mcp 19 | ENTRYPOINT ["/usr/local/bin/hyper-mcp"] 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | [![Rust](https://img.shields.io/badge/rust-%23000000.svg?logo=rust&logoColor=white)](https://crates.io/crates/hyper-mcp) 4 | [![License](https://img.shields.io/badge/License-Apache--2.0-blue)](#license) 5 | [![Issues - hyper-mcp](https://img.shields.io/github/issues/tuananh/hyper-mcp)](https://github.com/tuananh/hyper-mcp/issues) 6 | ![GitHub Release](https://img.shields.io/github/v/release/tuananh/hyper-mcp) 7 | 8 |
9 | 10 |
11 | 12 | hyper-mcp logo 13 | 14 |
15 | 
16 | # hyper-mcp
17 | 
18 | A fast, secure MCP server that extends its capabilities through WebAssembly plugins.
19 | 
20 | ## What is it?
21 | 
22 | hyper-mcp makes it easy to add AI capabilities to your applications. It works with Claude Desktop, Cursor IDE, and other MCP-compatible apps. Write plugins in your favorite language, distribute them through container registries, and run them anywhere - from cloud to edge.
23 | 
24 | ## Features
25 | 
26 | - Write plugins in any language that compiles to WebAssembly
27 | - Distribute plugins via standard OCI registries (like Docker Hub)
28 | - Built on [Extism](https://github.com/extism/extism) for rock-solid plugin support
29 | - Lightweight enough for resource-constrained environments
30 | - Supports all 3 transports in the spec: `stdio`, `sse` and `streamable-http`.
31 | - Deploy anywhere: serverless, edge, mobile, IoT devices
32 | - Cross-platform compatibility out of the box
33 | 
34 | ## Security
35 | 
36 | Built with a security-first mindset:
37 | 
38 | - Sandboxed plugins that can't access your system without permission
39 | - Memory-safe execution with resource limits
40 | - Secure plugin distribution through container registries
41 | - Fine-grained access control for host functions
42 | - OCI plugin images are signed at publish time and verified at load time with [sigstore](https://www.sigstore.dev/).
43 | 
44 | ## Getting Started
45 | 
46 | 1. Create your config file:
47 |    - Linux: `$HOME/.config/hyper-mcp/config.json`
48 |    - Windows: `{FOLDERID_RoamingAppData}`, e.g. `C:\Users\Alice\AppData\Roaming`
49 |    - macOS: `$HOME/Library/Application Support/hyper-mcp/config.json`
50 | 
51 | ```json
52 | {
53 |   "plugins": [
54 |     {
55 |       "name": "time",
56 |       "path": "oci://ghcr.io/tuananh/time-plugin:latest"
57 |     },
58 |     {
59 |       "name": "qr-code",
60 |       "path": "oci://ghcr.io/tuananh/qrcode-plugin:latest"
61 |     },
62 |     {
63 |       "name": "hash",
64 |       "path": "oci://ghcr.io/tuananh/hash-plugin:latest"
65 |     },
66 |     {
67 |       "name": "myip",
68 |       "path": "oci://ghcr.io/tuananh/myip-plugin:latest",
69 |       "runtime_config": {
70 |         "allowed_hosts": ["1.1.1.1"]
71 |       }
72 |     },
73 |     {
74 |       "name": "fetch",
75 |       "path": "oci://ghcr.io/tuananh/fetch-plugin:latest",
76 |       "runtime_config": {
77 |         "allowed_hosts": ["*"],
78 |         "memory_limit": "100 MB"
79 |       }
80 |     }
81 |   ]
82 | }
83 | ```
84 | 
85 | 2. Start the server:
86 | 
87 | ```sh
88 | $ hyper-mcp
89 | ```
90 | 
91 | - By default, it uses the `stdio` transport. If you want SSE, pass `--transport sse`; for streamable HTTP, pass `--transport streamable-http`.
92 | - If you want to debug, set `RUST_LOG=info`.
93 | - If you're loading an unsigned OCI plugin, you need to set the `insecure_skip_signature` flag or the env var `HYPER_MCP_INSECURE_SKIP_SIGNATURE` to `true`.
94 | 
95 | ## Using with Cursor IDE
96 | 
97 | You can configure hyper-mcp either globally for all projects or specifically for individual projects.
98 | 
99 | 1. For project-scope configuration, create `.cursor/mcp.json` in your project root:
100 | ```json
101 | {
102 |   "mcpServers": {
103 |     "hyper-mcp": {
104 |       "command": "/path/to/hyper-mcp"
105 |     }
106 |   }
107 | }
108 | ```
109 | 
110 | 2. Set up hyper-mcp in Cursor's settings:
111 | ![cursor mcp](./assets/cursor-mcp.png)
112 | 
113 | 3. Start using tools through chat:
114 | ![cursor mcp chat](./assets/cursor-mcp-1.png)
115 | 
116 | ## Available Plugins
117 | 
118 | We maintain several example plugins to get you started:
119 | 
120 | - [time](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/time): Get current time and do time calculations (Rust)
121 | - [qr-code](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/qr-code): Generate QR codes (Rust)
122 | - [hash](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/hash): Generate various types of hashes (Rust)
123 | - [myip](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/myip): Get your current IP (Rust)
124 | - [fetch](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/fetch): Basic webpage fetching (Rust)
125 | - [crypto-price](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/crypto-price): Get cryptocurrency prices (Go)
126 | - [fs](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/fs): File system operations (Rust)
127 | - [github](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/github): GitHub plugin (Go)
128 | - [eval-py](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/eval-py): Evaluate Python code with RustPython (Rust)
129 | - [arxiv](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/arxiv): Search & download arXiv papers (Rust)
130 | - [memory](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/memory): Lets you store & retrieve memories, powered by SQLite (Rust)
131 | - [sqlite](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/sqlite): Interact with SQLite (Rust)
132 | - [crates-io](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/crates-io): Get general crate information and check the latest crate version (Rust)
133 | - [gomodule](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/gomodule): Get Go module info and versions (Rust)
134 | - [qdrant](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/qdrant): Store & retrieve memories with the Qdrant vector search engine (Rust)
135 | - [gitlab](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/gitlab): GitLab plugin (Rust)
136 | - [meme-generator](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/meme-generator): Meme generator (Rust)
137 | - [context7](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/context7): Look up library documentation (Rust)
138 | - [think](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins/think): Think tool (Rust)
139 | 
140 | 
141 | ### Community-built plugins
142 | 
143 | - [hackernews](https://github.com/hungran/hyper-mcp-hackernews-tool): This plugin connects to the Hacker News API to fetch the current top stories and display them with their titles, scores, authors, and URLs.
144 | - [release-monitor-id](https://github.com/ntheanh201/hyper-mcp-release-monitor-id-tool): This plugin retrieves a project ID from release-monitoring.org, which helps track versions of released software.
145 | - [yahoo-finance](https://github.com/phamngocquy/hyper-mcp-yfinance): This plugin connects to the Yahoo Finance API to provide stock prices (OHLCV) based on a company name or ticker symbol.
146 | - [rand16](https://github.com/dabevlohn/rand16): This plugin generates a random 16-byte buffer and returns it in base64url format - useful for symmetric cryptography online.
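
To try the plugins above locally, a typical invocation looks like this sketch. The flags are the ones documented in Getting Started and DEPLOYMENT.md; the config path and port are placeholders for your own setup:

```sh
# stdio transport (default), with debug logging
RUST_LOG=info hyper-mcp --config-file ./config.example.yaml

# SSE transport on port 3001, allowing unsigned local plugins while developing
HYPER_MCP_INSECURE_SKIP_SIGNATURE=true hyper-mcp \
  --transport sse \
  --bind-address 127.0.0.1:3001 \
  --config-file ./config.example.yaml
```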
147 | 148 | ## Creating Plugins 149 | 150 | Check out our [example plugins](https://github.com/tuananh/hyper-mcp/tree/main/examples/plugins) to learn how to build your own. 151 | 152 | To publish a plugin: 153 | 154 | ```dockerfile 155 | # example how to build with rust 156 | FROM rust:1.86-slim AS builder 157 | 158 | RUN rustup target add wasm32-wasip1 && \ 159 | rustup component add rust-std --target wasm32-wasip1 && \ 160 | cargo install cargo-auditable 161 | 162 | WORKDIR /workspace 163 | COPY . . 164 | RUN cargo fetch 165 | RUN cargo auditable build --release --target wasm32-wasip1 166 | 167 | FROM scratch 168 | WORKDIR / 169 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 170 | 171 | ``` 172 | 173 | Then build and push: 174 | ```sh 175 | docker build -t your-registry/plugin-name . 176 | docker push your-registry/plugin-name 177 | ``` 178 | 179 | ## License 180 | 181 | [Apache 2.0](./LICENSE) 182 | 183 | ## Star History 184 | 185 | [![Star History Chart](https://api.star-history.com/svg?repos=tuananh/hyper-mcp&type=Date)](https://www.star-history.com/#tuananh/hyper-mcp&Date) 186 | -------------------------------------------------------------------------------- /RUNTIME_CONFIG.md: -------------------------------------------------------------------------------- 1 | # Runtime Configuration 2 | 3 | ## Structure 4 | 5 | The configuration is structured as follows: 6 | 7 | - **plugins**: An array of plugin configuration objects. 8 | - **name** (`string`): Name of the plugin. 9 | - **path** (`string`): OCI path or HTTP URL or local path for the plugin. 10 | - **runtime_config** (`object`, optional): Plugin-specific runtime configuration. The available fields are: 11 | - **skip_tools** (`array[string]`, optional): List of tool names to skip loading at runtime. 12 | - **allowed_hosts** (`array[string]`, optional): List of allowed hosts for the plugin (e.g., `["1.1.1.1"]` or `["*"]`). 13 | - **allowed_paths** (`array[string]`, optional): List of allowed file system paths. 14 | - **env_vars** (`object`, optional): Key-value pairs of environment variables for the plugin. 15 | - **memory_limit** (`string`, optional): Memory limit for the plugin (e.g., `"512Mi"`). 16 | 17 | ## Example (YAML) 18 | 19 | ```yaml 20 | plugins: 21 | - name: time 22 | path: oci://ghcr.io/tuananh/time-plugin:latest 23 | - name: myip 24 | path: oci://ghcr.io/tuananh/myip-plugin:latest 25 | runtime_config: 26 | allowed_hosts: 27 | - "1.1.1.1" 28 | skip_tools: 29 | - "debug" 30 | env_vars: 31 | FOO: "bar" 32 | memory_limit: "512Mi" 33 | ``` 34 | 35 | ## Example (JSON) 36 | 37 | ```json 38 | { 39 | "plugins": [ 40 | { 41 | "name": "time", 42 | "path": "oci://ghcr.io/tuananh/time-plugin:latest" 43 | }, 44 | { 45 | "name": "myip", 46 | "path": "oci://ghcr.io/tuananh/myip-plugin:latest", 47 | "runtime_config": { 48 | "allowed_hosts": ["1.1.1.1"], 49 | "skip_tools": ["debug"], 50 | "env_vars": {"FOO": "bar"}, 51 | "memory_limit": "512Mi" 52 | } 53 | } 54 | ] 55 | } 56 | ``` 57 | 58 | ## Loading Configuration 59 | 60 | Configuration is loaded at runtime from a file with `.json`, `.yaml`, `.yml`, or `.toml` extension. The loader will parse the file according to its extension. If the file does not exist or the format is unsupported, an error will be raised. 61 | 62 | ## Notes 63 | 64 | - Fields marked as `optional` can be omitted. 65 | - Plugin authors may extend `runtime_config` with additional fields, but only the above are officially recognized. 
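
## Example (TOML)

The loader also accepts a `.toml` extension. The following is a sketch of the equivalent configuration, assuming the field layout mirrors the YAML/JSON examples above:

```toml
[[plugins]]
name = "time"
path = "oci://ghcr.io/tuananh/time-plugin:latest"

[[plugins]]
name = "myip"
path = "oci://ghcr.io/tuananh/myip-plugin:latest"

# runtime_config attaches to the most recently declared [[plugins]] entry
[plugins.runtime_config]
allowed_hosts = ["1.1.1.1"]
skip_tools = ["debug"]
memory_limit = "512Mi"

[plugins.runtime_config.env_vars]
FOO = "bar"
```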
66 | -------------------------------------------------------------------------------- /assets/cursor-mcp-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/assets/cursor-mcp-1.png -------------------------------------------------------------------------------- /assets/cursor-mcp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/assets/cursor-mcp.png -------------------------------------------------------------------------------- /assets/eval-py.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/assets/eval-py.jpg -------------------------------------------------------------------------------- /assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/assets/logo.png -------------------------------------------------------------------------------- /config.example.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | { 4 | "name": "time", 5 | "path": "oci://ghcr.io/tuananh/time-plugin:latest" 6 | }, 7 | { 8 | "name": "qr-code", 9 | "path": "oci://ghcr.io/tuananh/qrcode-plugin:latest" 10 | }, 11 | { 12 | "name": "hash", 13 | "path": "oci://ghcr.io/tuananh/hash-plugin:latest" 14 | }, 15 | { 16 | "name": "myip", 17 | "path": "oci://ghcr.io/tuananh/myip-plugin:latest", 18 | "runtime_config": { 19 | "allowed_hosts": ["1.1.1.1"] 20 | } 21 | }, 22 | { 23 | "name": "fetch", 24 | "path": "oci://ghcr.io/tuananh/fetch-plugin:latest", 25 | "runtime_config": { 26 | "allowed_hosts": ["*"] 27 | } 28 | } 29 | ] 30 | } -------------------------------------------------------------------------------- /config.example.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | plugins: 3 | - name: time 4 | path: oci://ghcr.io/tuananh/time-plugin:latest 5 | - name: qr-code 6 | path: oci://ghcr.io/tuananh/qrcode-plugin:latest 7 | - name: hash 8 | path: oci://ghcr.io/tuananh/hash-plugin:latest 9 | - name: myip 10 | path: oci://ghcr.io/tuananh/myip-plugin:latest 11 | runtime_config: 12 | allowed_hosts: 13 | - "1.1.1.1" 14 | - name: fetch 15 | path: oci://ghcr.io/tuananh/fetch-plugin:latest 16 | runtime_config: 17 | allowed_hosts: 18 | - "*" 19 | -------------------------------------------------------------------------------- /examples/plugins/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/examples/plugins/.gitkeep -------------------------------------------------------------------------------- /examples/plugins/arxiv/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/arxiv/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock 
from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/arxiv/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "arxiv" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | chrono = { version = "0.4", features = ["serde"] } 15 | feed-rs = "1.3" 16 | urlencoding = "2.1" 17 | base64 = "0.21" 18 | base64-serde = "0.8.0" 19 | -------------------------------------------------------------------------------- /examples/plugins/arxiv/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/arxiv/README.md: -------------------------------------------------------------------------------- 1 | # arxiv 2 | 3 | A plugin that let you search for papers on arXiv and download them. 
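
The plugin exposes two tools, `arxiv_search` and `arxiv_download_pdf` (see `src/lib.rs` below). Illustrative inputs, using a made-up query and paper ID:

```json
{ "query": "large language models", "max_results": 5 }
```

```json
{ "paper_id": "2301.00001", "save_path": "/tmp" }
```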
4 | 5 | ## Usage 6 | 7 | Call with: 8 | ```json 9 | { 10 | "plugins": [ 11 | // {}, 12 | { 13 | "name": "arxiv", 14 | "path": "/home/anh/Code/hyper-mcp/examples/plugins/arxiv/target/wasm32-wasip1/release/plugin.wasm", 15 | "runtime_config": { 16 | "allowed_hosts": ["export.arxiv.org", "arxiv.org"], 17 | "allowed_paths": ["/tmp"] 18 | } 19 | } 20 | ] 21 | } 22 | 23 | ``` 24 | -------------------------------------------------------------------------------- /examples/plugins/arxiv/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use chrono::{DateTime, Utc}; 4 | use extism_pdk::*; 5 | use pdk::types::{ 6 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, Role, TextAnnotation, 7 | ToolDescription, 8 | }; 9 | use serde::{Deserialize, Serialize}; 10 | use serde_json::json; 11 | #[derive(Debug, Serialize, Deserialize)] 12 | struct Paper { 13 | paper_id: String, 14 | title: String, 15 | authors: Vec, 16 | abstract_text: String, 17 | url: String, 18 | pdf_url: String, 19 | published_date: DateTime, 20 | updated_date: DateTime, 21 | source: String, 22 | categories: Vec, 23 | keywords: Vec, 24 | doi: String, 25 | } 26 | 27 | pub(crate) fn call(input: CallToolRequest) -> Result { 28 | match input.params.name.as_str() { 29 | "arxiv_search" => search(input), 30 | "arxiv_download_pdf" => download_pdf(input), 31 | _ => Ok(CallToolResult { 32 | is_error: Some(true), 33 | content: vec![Content { 34 | annotations: None, 35 | text: Some(format!("Unknown tool: {}", input.params.name)), 36 | mime_type: None, 37 | r#type: ContentType::Text, 38 | data: None, 39 | }], 40 | }), 41 | } 42 | } 43 | 44 | fn search(input: CallToolRequest) -> Result { 45 | let args = input.params.arguments.unwrap_or_default(); 46 | let query = match args.get("query") { 47 | Some(v) if v.is_string() => v.as_str().unwrap(), 48 | _ => return Err(Error::msg("query parameter is required")), 49 | }; 50 | 51 | let max_results = args 52 | .get("max_results") 53 | .and_then(|v| v.as_u64()) 54 | .unwrap_or(10); 55 | 56 | let req = HttpRequest { 57 | url: format!( 58 | "http://export.arxiv.org/api/query?search_query={}&max_results={}&sortBy=submittedDate&sortOrder=descending", 59 | urlencoding::encode(query), 60 | max_results 61 | ), 62 | headers: [( 63 | "User-Agent".to_string(), 64 | "hyper-mcp/1.0 (https://github.com/tuananh/hyper-mcp)".to_string(), 65 | )] 66 | .into_iter() 67 | .collect(), 68 | method: Some("GET".to_string()), 69 | }; 70 | 71 | let res = http::request::<()>(&req, None)?; 72 | 73 | let body = res.body(); 74 | let xml = String::from_utf8_lossy(body.as_slice()); 75 | 76 | let feed = match feed_rs::parser::parse(xml.as_bytes()) { 77 | Ok(feed) => feed, 78 | Err(e) => return Err(Error::msg(format!("Failed to parse arXiv feed: {}", e))), 79 | }; 80 | 81 | let mut papers = Vec::new(); 82 | for entry in feed.entries { 83 | let paper_id = entry.id.split("/abs/").last().unwrap_or("").to_string(); 84 | 85 | let authors = entry 86 | .authors 87 | .iter() 88 | .map(|author| author.name.clone()) 89 | .collect(); 90 | 91 | let categories = entry 92 | .categories 93 | .iter() 94 | .map(|cat| cat.term.clone()) 95 | .collect(); 96 | 97 | papers.push(Paper { 98 | paper_id, 99 | title: entry.title.map(|t| t.content).unwrap_or_default(), 100 | authors, 101 | abstract_text: entry.content.and_then(|c| c.body).unwrap_or_default(), 102 | url: entry 103 | .links 104 | .iter() 105 | .find(|l| l.rel == Some("alternate".to_string())) 106 | .map(|l| l.href.clone()) 107 
| .unwrap_or_default(), 108 | pdf_url: entry 109 | .links 110 | .iter() 111 | .find(|l| l.media_type.as_deref() == Some("application/pdf")) 112 | .map(|l| l.href.clone()) 113 | .unwrap_or_default(), 114 | published_date: entry.published.unwrap_or_default(), 115 | updated_date: entry.updated.unwrap_or_default(), 116 | source: "arxiv".to_string(), 117 | categories, 118 | keywords: Vec::new(), 119 | doi: String::new(), 120 | }); 121 | } 122 | 123 | Ok(CallToolResult { 124 | is_error: None, 125 | content: vec![Content { 126 | annotations: None, 127 | text: Some(serde_json::to_string(&papers)?), 128 | mime_type: Some("application/json".to_string()), 129 | r#type: ContentType::Text, 130 | data: None, 131 | }], 132 | }) 133 | } 134 | 135 | fn download_pdf(input: CallToolRequest) -> Result { 136 | let args = input.params.arguments.unwrap_or_default(); 137 | let paper_id = match args.get("paper_id") { 138 | Some(v) if v.is_string() => v.as_str().unwrap(), 139 | _ => return Err(Error::msg("paper_id parameter is required")), 140 | }; 141 | 142 | // Get the path parameter with default to /tmp 143 | let save_path = args 144 | .get("save_path") 145 | .and_then(|v| v.as_str()) 146 | .unwrap_or("/tmp"); 147 | 148 | // Clean up the paper ID in case it contains the full URL 149 | let clean_paper_id = if paper_id.contains("/") { 150 | paper_id.split('/').next_back().unwrap_or(paper_id) 151 | } else { 152 | paper_id 153 | }; 154 | 155 | let url = format!("https://arxiv.org/pdf/{}", clean_paper_id); 156 | 157 | let req = HttpRequest { 158 | url, 159 | headers: [ 160 | ( 161 | "User-Agent".to_string(), 162 | "Mozilla/5.0 (compatible; hyper-mcp/1.0)".to_string(), 163 | ), 164 | ("Accept".to_string(), "application/pdf".to_string()), 165 | ] 166 | .into_iter() 167 | .collect(), 168 | method: Some("GET".to_string()), 169 | }; 170 | 171 | let res = match http::request::<()>(&req, None) { 172 | Ok(r) => r, 173 | Err(e) => return Err(Error::msg(format!("HTTP request failed: {}", e))), 174 | }; 175 | 176 | let pdf_data = res.body(); 177 | if pdf_data.is_empty() { 178 | return Err(Error::msg("Received empty PDF data from arXiv")); 179 | } 180 | 181 | let file_path = format!("{}/{}.pdf", save_path.trim_end_matches('/'), clean_paper_id); 182 | match std::fs::write(&file_path, &pdf_data) { 183 | Ok(_) => (), 184 | Err(e) => { 185 | return Err(Error::msg(format!( 186 | "Failed to write PDF to {}: {}", 187 | file_path, e 188 | ))); 189 | } 190 | } 191 | 192 | // let pdf_base64 = base64::engine::general_purpose::STANDARD.encode(pdf_data); 193 | 194 | // TODO: actually return a resource 195 | Ok(CallToolResult { 196 | is_error: None, 197 | content: vec![Content { 198 | annotations: Some(TextAnnotation { 199 | audience: vec![Role::User, Role::Assistant], 200 | priority: 1.0, 201 | }), 202 | text: Some(format!("PDF saved to: {}", file_path)), 203 | mime_type: None, 204 | data: None, 205 | r#type: ContentType::Text, 206 | }], 207 | }) 208 | } 209 | 210 | pub(crate) fn describe() -> Result { 211 | Ok(ListToolsResult { 212 | tools: vec![ 213 | ToolDescription { 214 | name: "arxiv_search".into(), 215 | description: "Search for papers on arXiv".into(), 216 | input_schema: json!({ 217 | "type": "object", 218 | "properties": { 219 | "query": { 220 | "type": "string", 221 | "description": "The search query", 222 | }, 223 | "max_results": { 224 | "type": "integer", 225 | "description": "Maximum number of results to return (default: 10)", 226 | } 227 | }, 228 | "required": ["query"], 229 | }) 230 | .as_object() 231 | .unwrap() 232 | 
.clone(), 233 | }, 234 | ToolDescription { 235 | name: "arxiv_download_pdf".into(), 236 | description: "Download a paper's PDF from arXiv".into(), 237 | input_schema: json!({ 238 | "type": "object", 239 | "properties": { 240 | "paper_id": { 241 | "type": "string", 242 | "description": "The arXiv paper ID", 243 | }, 244 | "save_path": { 245 | "type": "string", 246 | "description": "Path to save the PDF file (default: /tmp)", 247 | } 248 | }, 249 | "required": ["paper_id"], 250 | }) 251 | .as_object() 252 | .unwrap() 253 | .clone(), 254 | }, 255 | ], 256 | }) 257 | } 258 | -------------------------------------------------------------------------------- /examples/plugins/context7/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/context7/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/context7/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "context7" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0.219", features = ["derive"] } 13 | serde_json = "1.0.140" 14 | base64-serde = "0.8.0" 15 | base64 = "0.22.1" 16 | htmd = "0.1.6" 17 | urlencoding = "2.1.3" 18 | -------------------------------------------------------------------------------- /examples/plugins/context7/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/context7/README.md: -------------------------------------------------------------------------------- 1 | # Context7 API Tools Plugin 2 | 3 | This plugin provides tools to interact with the Context7 API, allowing for resolving library IDs and fetching documentation. 
4 | 5 | ## Usage 6 | 7 | ``` 8 | { 9 | "plugins": [ 10 | { 11 | "name": "context7", 12 | "path": "oci://ghcr.io/tuananh/context7-plugin:nightly", 13 | "runtime_config": { 14 | "allowed_hosts": ["context7.com"] 15 | } 16 | } 17 | ] 18 | } 19 | ``` 20 | 21 | ## Tools 22 | 23 | ### 1. `c7_resolve_library_id` 24 | 25 | **Description:** Resolves a package name to a Context7-compatible library ID and returns a list of matching libraries. You MUST call this function before 'c7_get_library_docs' to obtain a valid Context7-compatible library ID. When selecting the best match, consider: - Name similarity to the query - Description relevance - Code Snippet count (documentation coverage) - GitHub Stars (popularity) Return the selected library ID and explain your choice. If there are multiple good matches, mention this but proceed with the most relevant one. 26 | 27 | **Input Schema:** 28 | An object with the following properties: 29 | - `library_name` (string, required): The general name of the library (e.g., 'React', 'upstash/redis'). 30 | 31 | **Example Input:** 32 | ```json 33 | { 34 | "library_name": "upstash/redis" 35 | } 36 | ``` 37 | 38 | **Output:** 39 | A JSON string containing the resolved Context7 compatible library ID. 40 | 41 | **Example Output:** 42 | ```json 43 | { 44 | "context7_compatible_library_id": "upstash_redis_id" 45 | } 46 | ``` 47 | 48 | ### 2. `c7_get_library_docs` 49 | 50 | **Description:** Fetches up-to-date documentation for a library. You must call 'c7_resolve_library_id' first to obtain the exact Context7-compatible library ID required to use this tool. 51 | 52 | **Input Schema:** 53 | An object with the following properties: 54 | - `context7_compatible_library_id` (string, required): The Context7-compatible ID for the library. 55 | - `topic` (string, optional): Focus the docs on a specific topic (e.g., 'routing', 'hooks'). 56 | - `tokens` (integer, optional): Max number of tokens for the documentation (default: 10000). 57 | 58 | **Example Input:** 59 | ```json 60 | { 61 | "context7_compatible_library_id": "upstash_redis_id", 62 | "topic": "data_types", 63 | "tokens": 5000 64 | } 65 | ``` 66 | 67 | **Output:** 68 | 69 | The fetched documentation in text format. 70 | -------------------------------------------------------------------------------- /examples/plugins/crates-io/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/crates-io/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/crates-io/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "crates-io" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0.219", features = ["derive"] } 13 | serde_json = "1.0.140" 14 | base64-serde = "0.8.0" 15 | base64 = "0.22.1" 16 | -------------------------------------------------------------------------------- /examples/plugins/crates-io/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/crates-io/README.md: -------------------------------------------------------------------------------- 1 | # crates-io 2 | 3 | A plugin that fetches crate information and latest versions from crates.io. 4 | 5 | ## What it does 6 | 7 | Provides two main functionalities: 8 | 1. `crates_io_latest_version`: Fetches the latest version of multiple crates 9 | 2. `crates_io_crate_info`: Fetches detailed information about multiple crates including description, downloads, repository, documentation, etc. 10 | 11 | ## Usage 12 | 13 | Call with: 14 | ```json 15 | { 16 | "plugins": [ 17 | { 18 | "name": "crates-io", 19 | "path": "oci://ghcr.io/tuananh/crates-io-plugin:latest", 20 | "runtime_config": { 21 | "allowed_hosts": ["crates.io"] 22 | } 23 | } 24 | ] 25 | } 26 | ``` 27 | 28 | ### Example Usage 29 | 30 | 1. Get latest version of multiple crates: 31 | ```json 32 | { 33 | "name": "crates_io_latest_version", 34 | "params": { 35 | "crate_names": "serde,tokio,clap" 36 | } 37 | } 38 | ``` 39 | 40 | 2. 
Get detailed information about multiple crates: 41 | ```json 42 | { 43 | "name": "crates_io_crate_info", 44 | "params": { 45 | "crate_names": "serde,tokio,clap" 46 | } 47 | } 48 | ``` 49 | 50 | Returns: 51 | - For `crates_io_latest_version`: A JSON object mapping crate names to their latest version numbers 52 | - For `crates_io_crate_info`: An array of JSON objects containing detailed crate information for each crate, including: 53 | - Name 54 | - Description 55 | - Latest version 56 | - Download count 57 | - Repository URL 58 | - Documentation URL 59 | - Homepage URL 60 | - Keywords 61 | - Categories 62 | - License 63 | - Creation and update timestamps -------------------------------------------------------------------------------- /examples/plugins/crates-io/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use std::collections::BTreeMap; 4 | 5 | use extism_pdk::*; 6 | use json::Value; 7 | use pdk::types::{ 8 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 9 | }; 10 | use serde_json::json; 11 | 12 | pub(crate) fn call(input: CallToolRequest) -> Result { 13 | match input.params.name.as_str() { 14 | "crates_io_latest_version" => latest_version(input), 15 | "crates_io_crate_info" => crate_info(input), 16 | _ => Ok(CallToolResult { 17 | is_error: Some(true), 18 | content: vec![Content { 19 | annotations: None, 20 | text: Some(format!("Unknown tool: {}", input.params.name)), 21 | mime_type: None, 22 | r#type: ContentType::Text, 23 | data: None, 24 | }], 25 | }), 26 | } 27 | } 28 | 29 | fn crate_info(input: CallToolRequest) -> Result { 30 | let args = input.params.arguments.unwrap_or_default(); 31 | if let Some(Value::String(crate_names)) = args.get("crate_names") { 32 | let crate_names: Vec<&str> = crate_names.split(',').map(|s| s.trim()).collect(); 33 | let mut results = Vec::new(); 34 | 35 | for crate_name in crate_names { 36 | // Create HTTP request to crates.io API 37 | let mut req = HttpRequest { 38 | url: format!("https://crates.io/api/v1/crates/{}", crate_name), 39 | headers: BTreeMap::new(), 40 | method: Some("GET".to_string()), 41 | }; 42 | 43 | // Add a user agent header to be polite 44 | req.headers 45 | .insert("User-Agent".to_string(), "crates-io-tool/1.0".to_string()); 46 | 47 | // Perform the request 48 | let res = http::request::<()>(&req, None)?; 49 | 50 | // Convert response body to string 51 | let body = res.body(); 52 | let json_str = String::from_utf8_lossy(body.as_slice()); 53 | 54 | // Parse the JSON response 55 | let json: serde_json::Value = serde_json::from_str(&json_str)?; 56 | 57 | if let Some(crate_info) = json["crate"].as_object() { 58 | // Extract relevant information with null checks 59 | let info = json!({ 60 | "name": crate_info.get("name").and_then(|v| v.as_str()), 61 | "description": crate_info.get("description").and_then(|v| v.as_str()), 62 | "latest_version": crate_info.get("max_version").and_then(|v| v.as_str()), 63 | "downloads": crate_info.get("downloads").and_then(|v| v.as_i64()), 64 | "repository": crate_info.get("repository").and_then(|v| v.as_str()), 65 | "documentation": crate_info.get("documentation").and_then(|v| v.as_str()), 66 | "homepage": crate_info.get("homepage").and_then(|v| v.as_str()), 67 | "keywords": crate_info.get("keywords").and_then(|v| v.as_array()), 68 | "categories": crate_info.get("categories").and_then(|v| v.as_array()), 69 | "license": json["versions"].as_array().and_then(|v| v.first()).and_then(|v| 
v["license"].as_str()), 70 | "created_at": crate_info.get("created_at").and_then(|v| v.as_str()), 71 | "updated_at": crate_info.get("updated_at").and_then(|v| v.as_str()), 72 | }); 73 | 74 | results.push(info); 75 | } 76 | } 77 | 78 | if !results.is_empty() { 79 | Ok(CallToolResult { 80 | is_error: None, 81 | content: vec![Content { 82 | annotations: None, 83 | text: Some(serde_json::to_string(&results)?), 84 | mime_type: Some("text/plain".to_string()), 85 | r#type: ContentType::Text, 86 | data: None, 87 | }], 88 | }) 89 | } else { 90 | Ok(CallToolResult { 91 | is_error: Some(true), 92 | content: vec![Content { 93 | annotations: None, 94 | text: Some("Failed to get crate information".into()), 95 | mime_type: None, 96 | r#type: ContentType::Text, 97 | data: None, 98 | }], 99 | }) 100 | } 101 | } else { 102 | Ok(CallToolResult { 103 | is_error: Some(true), 104 | content: vec![Content { 105 | annotations: None, 106 | text: Some("Please provide crate names".into()), 107 | mime_type: None, 108 | r#type: ContentType::Text, 109 | data: None, 110 | }], 111 | }) 112 | } 113 | } 114 | 115 | fn latest_version(input: CallToolRequest) -> Result { 116 | let args = input.params.arguments.unwrap_or_default(); 117 | if let Some(Value::String(crate_names)) = args.get("crate_names") { 118 | let crate_names: Vec<&str> = crate_names.split(',').map(|s| s.trim()).collect(); 119 | let mut results = BTreeMap::new(); 120 | 121 | for crate_name in crate_names { 122 | // Create HTTP request to crates.io API 123 | let mut req = HttpRequest { 124 | url: format!("https://crates.io/api/v1/crates/{}", crate_name), 125 | headers: BTreeMap::new(), 126 | method: Some("GET".to_string()), 127 | }; 128 | 129 | // Add a user agent header to be polite 130 | req.headers 131 | .insert("User-Agent".to_string(), "crates-io-tool/1.0".to_string()); 132 | 133 | // Perform the request 134 | let res = http::request::<()>(&req, None)?; 135 | 136 | // Convert response body to string 137 | let body = res.body(); 138 | let json_str = String::from_utf8_lossy(body.as_slice()); 139 | 140 | // Parse the JSON response 141 | let json: serde_json::Value = serde_json::from_str(&json_str)?; 142 | 143 | if let Some(version) = json["crate"]["max_version"].as_str() { 144 | results.insert(crate_name.to_string(), version.to_string()); 145 | } 146 | } 147 | 148 | if !results.is_empty() { 149 | Ok(CallToolResult { 150 | is_error: None, 151 | content: vec![Content { 152 | annotations: None, 153 | text: Some(serde_json::to_string(&results)?), 154 | mime_type: Some("text/plain".to_string()), 155 | r#type: ContentType::Text, 156 | data: None, 157 | }], 158 | }) 159 | } else { 160 | Ok(CallToolResult { 161 | is_error: Some(true), 162 | content: vec![Content { 163 | annotations: None, 164 | text: Some("Failed to get latest versions".into()), 165 | mime_type: None, 166 | r#type: ContentType::Text, 167 | data: None, 168 | }], 169 | }) 170 | } 171 | } else { 172 | Ok(CallToolResult { 173 | is_error: Some(true), 174 | content: vec![Content { 175 | annotations: None, 176 | text: Some("Please provide crate names".into()), 177 | mime_type: None, 178 | r#type: ContentType::Text, 179 | data: None, 180 | }], 181 | }) 182 | } 183 | } 184 | 185 | pub(crate) fn describe() -> Result { 186 | Ok(ListToolsResult { 187 | tools: vec![ 188 | ToolDescription { 189 | name: "crates_io_latest_version".into(), 190 | description: "Fetches the latest version of multiple crates from crates.io".into(), 191 | input_schema: json!({ 192 | "type": "object", 193 | "properties": { 194 | 
"crate_names": { 195 | "type": "string", 196 | "description": "Comma-separated list of crate names to get the latest versions for", 197 | }, 198 | }, 199 | "required": ["crate_names"], 200 | }) 201 | .as_object() 202 | .unwrap() 203 | .clone(), 204 | }, 205 | ToolDescription { 206 | name: "crates_io_crate_info".into(), 207 | description: "Fetches detailed information about multiple crates from crates.io".into(), 208 | input_schema: json!({ 209 | "type": "object", 210 | "properties": { 211 | "crate_names": { 212 | "type": "string", 213 | "description": "Comma-separated list of crate names to get information for", 214 | }, 215 | }, 216 | "required": ["crate_names"], 217 | }) 218 | .as_object() 219 | .unwrap() 220 | .clone(), 221 | }, 222 | ], 223 | }) 224 | } 225 | -------------------------------------------------------------------------------- /examples/plugins/crypto-price/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tinygo/tinygo:0.37.0 AS builder 2 | 3 | WORKDIR /workspace 4 | COPY go.mod . 5 | COPY go.sum . 6 | RUN go mod download 7 | COPY . . 8 | RUN tinygo build -target wasi -o plugin.wasm . 9 | 10 | FROM scratch 11 | WORKDIR / 12 | COPY --from=builder /workspace/plugin.wasm /plugin.wasm 13 | -------------------------------------------------------------------------------- /examples/plugins/crypto-price/README.md: -------------------------------------------------------------------------------- 1 | # crypto-price 2 | 3 | ## Usage 4 | 5 | ```json 6 | { 7 | "plugins": [ 8 | { 9 | "name": "crypto-price", 10 | "path": "oci://ghcr.io/tuananh/crypto-price-plugin:latest", 11 | "runtime_config": { 12 | "allowed_hosts": ["api.coingecko.com"] 13 | } 14 | } 15 | ] 16 | } 17 | ``` 18 | 19 | ## Notes 20 | 21 | - HTTP request need to use `pdk.NewHTTPRequest`. 22 | 23 | ```go 24 | req := pdk.NewHTTPRequest(pdk.MethodGet, url) 25 | resp := req.Send() 26 | ``` 27 | 28 | - We use `tinygo` for WASI support. 29 | 30 | - Need to export `_Call` as `call` to make it consistent. Same with `describe`. 
31 | 32 | ``` 33 | //export call 34 | func _Call() int32 { 35 | ``` -------------------------------------------------------------------------------- /examples/plugins/crypto-price/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/tuananh/hyper-mcp/crypto-price 2 | 3 | go 1.24.1 4 | 5 | require github.com/extism/go-pdk v1.1.3 6 | -------------------------------------------------------------------------------- /examples/plugins/crypto-price/go.sum: -------------------------------------------------------------------------------- 1 | github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ= 2 | github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4= 3 | -------------------------------------------------------------------------------- /examples/plugins/crypto-price/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "errors" 6 | "fmt" 7 | "strings" 8 | 9 | pdk "github.com/extism/go-pdk" 10 | ) 11 | 12 | func Call(input CallToolRequest) (CallToolResult, error) { 13 | args := input.Params.Arguments 14 | if args == nil { 15 | return CallToolResult{}, errors.New("Arguments must be provided") 16 | } 17 | 18 | argsMap := args.(map[string]interface{}) 19 | fmt.Println("argsMap", argsMap) 20 | return getCryptoPrice(argsMap) 21 | } 22 | 23 | func getCryptoPrice(args map[string]interface{}) (CallToolResult, error) { 24 | symbol, ok := args["symbol"].(string) 25 | if !ok { 26 | return CallToolResult{}, errors.New("symbol must be provided") 27 | } 28 | 29 | // Convert symbol to uppercase 30 | symbol = strings.ToUpper(symbol) 31 | 32 | // Use CoinGecko API to get the price 33 | url := fmt.Sprintf("https://api.coingecko.com/api/v3/simple/price?ids=%s&vs_currencies=usd", strings.ToLower(symbol)) 34 | req := pdk.NewHTTPRequest(pdk.MethodGet, url) 35 | resp := req.Send() 36 | 37 | var result map[string]map[string]float64 38 | if err := json.Unmarshal(resp.Body(), &result); err != nil { 39 | return CallToolResult{}, fmt.Errorf("failed to parse response: %v", err) 40 | } 41 | 42 | if price, ok := result[strings.ToLower(symbol)]["usd"]; ok { 43 | priceStr := fmt.Sprintf("%.2f USD", price) 44 | return CallToolResult{ 45 | Content: []Content{ 46 | { 47 | Type: ContentTypeText, 48 | Text: &priceStr, 49 | }, 50 | }, 51 | }, nil 52 | } 53 | 54 | return CallToolResult{}, fmt.Errorf("price not found for %s", symbol) 55 | } 56 | 57 | func Describe() (ListToolsResult, error) { 58 | return ListToolsResult{ 59 | Tools: []ToolDescription{ 60 | { 61 | Name: "crypto-price", 62 | Description: "Get the current price of a cryptocurrency in USD", 63 | InputSchema: map[string]interface{}{ 64 | "type": "object", 65 | "required": []string{"symbol"}, 66 | "properties": map[string]interface{}{ 67 | "symbol": map[string]interface{}{ 68 | "type": "string", 69 | "description": "the cryptocurrency symbol/id (e.g., bitcoin, ethereum)", 70 | }, 71 | }, 72 | }, 73 | }, 74 | }, 75 | }, nil 76 | } 77 | -------------------------------------------------------------------------------- /examples/plugins/crypto-price/pdk.gen.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "errors" 5 | 6 | pdk "github.com/extism/go-pdk" 7 | ) 8 | 9 | //export call 10 | func _Call() int32 { 11 | var err error 12 | _ = err 13 | pdk.Log(pdk.LogDebug, "Call: getting JSON input") 14 | var input 
CallToolRequest 15 | err = pdk.InputJSON(&input) 16 | if err != nil { 17 | pdk.SetError(err) 18 | return -1 19 | } 20 | 21 | pdk.Log(pdk.LogDebug, "Call: calling implementation function") 22 | output, err := Call(input) 23 | if err != nil { 24 | pdk.SetError(err) 25 | return -1 26 | } 27 | 28 | pdk.Log(pdk.LogDebug, "Call: setting JSON output") 29 | err = pdk.OutputJSON(output) 30 | if err != nil { 31 | pdk.SetError(err) 32 | return -1 33 | } 34 | 35 | pdk.Log(pdk.LogDebug, "Call: returning") 36 | return 0 37 | } 38 | 39 | //export describe 40 | func _Describe() int32 { 41 | var err error 42 | _ = err 43 | output, err := Describe() 44 | if err != nil { 45 | pdk.SetError(err) 46 | return -1 47 | } 48 | 49 | pdk.Log(pdk.LogDebug, "Describe: setting JSON output") 50 | err = pdk.OutputJSON(output) 51 | if err != nil { 52 | pdk.SetError(err) 53 | return -1 54 | } 55 | 56 | pdk.Log(pdk.LogDebug, "Describe: returning") 57 | return 0 58 | } 59 | 60 | type BlobResourceContents struct { 61 | // A base64-encoded string representing the binary data of the item. 62 | Blob string `json:"blob"` 63 | // The MIME type of this resource, if known. 64 | MimeType *string `json:"mimeType,omitempty"` 65 | // The URI of this resource. 66 | Uri string `json:"uri"` 67 | } 68 | 69 | // Used by the client to invoke a tool provided by the server. 70 | type CallToolRequest struct { 71 | Method *string `json:"method,omitempty"` 72 | Params Params `json:"params"` 73 | } 74 | 75 | // The server's response to a tool call. 76 | // 77 | // Any errors that originate from the tool SHOULD be reported inside the result 78 | // object, with `isError` set to true, _not_ as an MCP protocol-level error 79 | // response. Otherwise, the LLM would not be able to see that an error occurred 80 | // and self-correct. 81 | // 82 | // However, any errors in _finding_ the tool, an error indicating that the 83 | // server does not support tool calls, or any other exceptional conditions, 84 | // should be reported as an MCP error response. 85 | type CallToolResult struct { 86 | Content []Content `json:"content"` 87 | // Whether the tool call ended in an error. 88 | // 89 | // If not set, this is assumed to be false (the call was successful). 90 | IsError *bool `json:"isError,omitempty"` 91 | } 92 | 93 | // A content response. 94 | // For text content set type to ContentType.Text and set the `text` property 95 | // For image content set type to ContentType.Image and set the `data` and `mimeType` properties 96 | type Content struct { 97 | Annotations *TextAnnotation `json:"annotations,omitempty"` 98 | // The base64-encoded image data. 99 | Data *string `json:"data,omitempty"` 100 | // The MIME type of the image. Different providers may support different image types. 101 | MimeType *string `json:"mimeType,omitempty"` 102 | // The text content of the message. 
103 | Text *string `json:"text,omitempty"` 104 | Type ContentType `json:"type"` 105 | } 106 | 107 | type ContentType string 108 | 109 | const ( 110 | ContentTypeText ContentType = "text" 111 | ContentTypeImage ContentType = "image" 112 | ContentTypeResource ContentType = "resource" 113 | ) 114 | 115 | func (v ContentType) String() string { 116 | switch v { 117 | case ContentTypeText: 118 | return `text` 119 | case ContentTypeImage: 120 | return `image` 121 | case ContentTypeResource: 122 | return `resource` 123 | default: 124 | return "" 125 | } 126 | } 127 | 128 | func stringToContentType(s string) (ContentType, error) { 129 | switch s { 130 | case `text`: 131 | return ContentTypeText, nil 132 | case `image`: 133 | return ContentTypeImage, nil 134 | case `resource`: 135 | return ContentTypeResource, nil 136 | default: 137 | return ContentType(""), errors.New("unable to convert string to ContentType") 138 | } 139 | } 140 | 141 | // Provides one or more descriptions of the tools available in this servlet. 142 | type ListToolsResult struct { 143 | // The list of ToolDescription objects provided by this servlet. 144 | Tools []ToolDescription `json:"tools"` 145 | } 146 | 147 | type Params struct { 148 | Arguments interface{} `json:"arguments,omitempty"` 149 | Name string `json:"name"` 150 | } 151 | 152 | // The sender or recipient of messages and data in a conversation. 153 | type Role string 154 | 155 | const ( 156 | RoleAssistant Role = "assistant" 157 | RoleUser Role = "user" 158 | ) 159 | 160 | func (v Role) String() string { 161 | switch v { 162 | case RoleAssistant: 163 | return `assistant` 164 | case RoleUser: 165 | return `user` 166 | default: 167 | return "" 168 | } 169 | } 170 | 171 | func stringToRole(s string) (Role, error) { 172 | switch s { 173 | case `assistant`: 174 | return RoleAssistant, nil 175 | case `user`: 176 | return RoleUser, nil 177 | default: 178 | return Role(""), errors.New("unable to convert string to Role") 179 | } 180 | } 181 | 182 | // A text annotation 183 | type TextAnnotation struct { 184 | // Describes who the intended customer of this object or data is. 185 | // 186 | // It can include multiple entries to indicate content useful for multiple audiences (e.g., `["user", "assistant"]`). 187 | Audience []Role `json:"audience,omitempty"` 188 | // Describes how important this data is for operating the server. 189 | // 190 | // A value of 1 means "most important," and indicates that the data is 191 | // effectively required, while 0 means "least important," and indicates that 192 | // the data is entirely optional. 193 | Priority float32 `json:"priority,omitempty"` 194 | } 195 | 196 | type TextResourceContents struct { 197 | // The MIME type of this resource, if known. 198 | MimeType *string `json:"mimeType,omitempty"` 199 | // The text of the item. This must only be set if the item can actually be represented as text (not binary data). 200 | Text string `json:"text"` 201 | // The URI of this resource. 202 | Uri string `json:"uri"` 203 | } 204 | 205 | // Describes the capabilities and expected paramters of the tool function 206 | type ToolDescription struct { 207 | // A description of the tool 208 | Description string `json:"description"` 209 | // The JSON schema describing the argument input 210 | InputSchema interface{} `json:"inputSchema"` 211 | // The name of the tool. It should match the plugin / binding name. 
212 | Name string `json:"name"` 213 | } 214 | 215 | // Note: leave this in place, as the Go compiler will find the `export` function as the entrypoint. 216 | func main() {} 217 | -------------------------------------------------------------------------------- /examples/plugins/eval-py/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/eval-py/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/eval-py/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "eval-py" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | rustpython-vm = { version = "0.4.0", default-features = false, features = ["compiler"] } 15 | base64-serde = "0.8.0" 16 | base64 = "0.22.1" 17 | 18 | [profile.release] 19 | lto = true 20 | opt-level = 's' 21 | strip = true 22 | 23 | [target.wasm32-wasi.dependencies] 24 | getrandom = { version = "0.2", features = ["js"] } 25 | -------------------------------------------------------------------------------- /examples/plugins/eval-py/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/eval-py/README.md: -------------------------------------------------------------------------------- 1 | # eval-py 2 | 3 | An example of using [RustPython](https://github.com/RustPython/RustPython) to evaluate Python code. 
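Once wired into the config below, the plugin exposes a single `eval_python` tool that takes a required `code` string and returns the result of the last expression (not anything the code prints). An illustrative call, following the request format used in the other plugin READMEs:

```json
{
  "name": "eval_python",
  "params": {
    "code": "sum(range(10))"
  }
}
```

which should come back as the text `45`.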
4 | 5 | 6 | ![](/assets/eval-py.jpg) 7 | 8 | ## Usage 9 | 10 | ```json 11 | { 12 | "plugins": [ 13 | { 14 | "name": "eval-py", 15 | "path": "/home/anh/Code/hyper-mcp/examples/plugins/eval-py/target/wasm32-wasip1/release/plugin.wasm" 16 | } 17 | ] 18 | } 19 | ``` -------------------------------------------------------------------------------- /examples/plugins/eval-py/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use rustpython_vm::{ 4 | self as vm, Settings, 5 | scope::Scope, 6 | }; 7 | use std::{ 8 | cell::RefCell, 9 | collections::HashMap, 10 | rc::Rc, 11 | }; 12 | 13 | use extism_pdk::*; 14 | use json::Value; 15 | use pdk::types::{ 16 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 17 | }; 18 | use serde_json::json; 19 | 20 | struct StoredVirtualMachine { 21 | interp: vm::Interpreter, 22 | scope: Scope, 23 | } 24 | 25 | impl StoredVirtualMachine { 26 | fn new() -> Self { 27 | let mut scope = None; 28 | let mut settings = Settings::default(); 29 | settings.allow_external_library = false; 30 | 31 | let interp = vm::Interpreter::with_init(settings, |vm| { 32 | scope = Some(vm.new_scope_with_builtins()); 33 | }); 34 | 35 | StoredVirtualMachine { 36 | interp, 37 | scope: scope.unwrap(), 38 | } 39 | } 40 | } 41 | 42 | thread_local! { 43 | static STORED_VMS: RefCell>> = RefCell::default(); 44 | } 45 | 46 | fn get_or_create_vm(id: &str) -> Rc { 47 | STORED_VMS.with(|cell| { 48 | let mut vms = cell.borrow_mut(); 49 | if !vms.contains_key(id) { 50 | let stored_vm = StoredVirtualMachine::new(); 51 | vms.insert(id.to_string(), Rc::new(stored_vm)); 52 | } 53 | vms.get(id).unwrap().clone() 54 | }) 55 | } 56 | 57 | pub(crate) fn call(input: CallToolRequest) -> Result { 58 | match input.params.name.as_str() { 59 | "eval_python" => eval_python(input), 60 | _ => Ok(CallToolResult { 61 | is_error: Some(true), 62 | content: vec![Content { 63 | annotations: None, 64 | text: Some(format!("Unknown tool: {}", input.params.name)), 65 | mime_type: None, 66 | r#type: ContentType::Text, 67 | data: None, 68 | }], 69 | }), 70 | } 71 | } 72 | 73 | fn eval_python(input: CallToolRequest) -> Result { 74 | let args = input.params.arguments.unwrap_or_default(); 75 | if let Some(Value::String(code)) = args.get("code") { 76 | let stored_vm = get_or_create_vm("eval_python"); 77 | 78 | let result = stored_vm.interp.enter(|vm| { 79 | match vm 80 | .compile(code, vm::compiler::Mode::Single, "".to_owned()) 81 | .map_err(|err| vm.new_syntax_error(&err, Some(code))) 82 | .and_then(|code_obj| vm.run_code_obj(code_obj, stored_vm.scope.clone())) 83 | { 84 | Ok(output) => { 85 | if !vm.is_none(&output) { 86 | stored_vm.scope.globals.set_item("last", output.clone(), vm)?; 87 | 88 | match output.str(vm) { 89 | Ok(s) => Ok(s.to_string()), 90 | Err(e) => Err(e) 91 | } 92 | } else { 93 | Ok("None".to_string()) 94 | } 95 | }, 96 | Err(exc) => Err(exc) 97 | } 98 | }); 99 | 100 | match result { 101 | Ok(output) => { 102 | Ok(CallToolResult { 103 | is_error: None, 104 | content: vec![Content { 105 | annotations: None, 106 | text: Some(output), 107 | mime_type: Some("text/plain".to_string()), 108 | r#type: ContentType::Text, 109 | data: None, 110 | }], 111 | }) 112 | }, 113 | Err(exc) => { 114 | let mut error_msg = String::new(); 115 | stored_vm.interp.enter(|vm| { 116 | vm.write_exception(&mut error_msg, &exc).unwrap_or_default(); 117 | }); 118 | Ok(CallToolResult { 119 | is_error: Some(true), 120 | content: vec![Content { 121 | 
annotations: None, 122 | text: Some(error_msg), 123 | mime_type: None, 124 | r#type: ContentType::Text, 125 | data: None, 126 | }], 127 | }) 128 | } 129 | } 130 | } else { 131 | Ok(CallToolResult { 132 | is_error: Some(true), 133 | content: vec![Content { 134 | annotations: None, 135 | text: Some("Please provide Python code to evaluate".into()), 136 | mime_type: None, 137 | r#type: ContentType::Text, 138 | data: None, 139 | }], 140 | }) 141 | } 142 | } 143 | 144 | pub(crate) fn describe() -> Result { 145 | Ok(ListToolsResult{ 146 | tools: vec![ 147 | ToolDescription { 148 | name: "eval_python".into(), 149 | description: "Evaluates Python code using RustPython and returns the result. Use this like how you would use a REPL. This won't return the output of the code, but the result of the last expression.".into(), 150 | input_schema: json!({ 151 | "type": "object", 152 | "properties": { 153 | "code": { 154 | "type": "string", 155 | "description": "The Python code to evaluate", 156 | }, 157 | }, 158 | "required": ["code"], 159 | }) 160 | .as_object() 161 | .unwrap() 162 | .clone(), 163 | }, 164 | ], 165 | }) 166 | } 167 | -------------------------------------------------------------------------------- /examples/plugins/fetch/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/fetch/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/fetch/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "fetch" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0.219", features = ["derive"] } 13 | serde_json = "1.0.140" 14 | base64-serde = "0.8.0" 15 | base64 = "0.22.1" 16 | htmd = "0.1.6" 17 | -------------------------------------------------------------------------------- /examples/plugins/fetch/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 
9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/fetch/README.md: -------------------------------------------------------------------------------- 1 | # fetch 2 | 3 | src: https://github.com/dylibso/mcp.run-servlets/tree/main/servlets/fetch 4 | 5 | 6 | A servlet that fetches web pages and converts them to markdown. 7 | 8 | ## What it does 9 | 10 | Takes a URL, fetches the page content, strips out scripts and styles, and converts the HTML to markdown format. 11 | 12 | ## Usage 13 | 14 | Call with: 15 | ```json 16 | { 17 | "plugins": [ 18 | // {}, 19 | { 20 | "name": "fetch", 21 | "path": "oci://ghcr.io/tuananh/fetch-plugin:latest", 22 | "runtime_config": { 23 | "allowed_hosts": ["*"] 24 | } 25 | } 26 | ] 27 | } 28 | 29 | ``` 30 | 31 | Returns the page content converted to markdown format. -------------------------------------------------------------------------------- /examples/plugins/fetch/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use std::collections::BTreeMap; 4 | 5 | use extism_pdk::*; 6 | use htmd::HtmlToMarkdown; 7 | use json::Value; 8 | use pdk::types::{ 9 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 10 | }; 11 | use serde_json::json; 12 | 13 | pub(crate) fn call(input: CallToolRequest) -> Result { 14 | match input.params.name.as_str() { 15 | "fetch" => fetch(input), 16 | _ => Ok(CallToolResult { 17 | is_error: Some(true), 18 | content: vec![Content { 19 | annotations: None, 20 | text: Some(format!("Unknown tool: {}", input.params.name)), 21 | mime_type: None, 22 | r#type: ContentType::Text, 23 | data: None, 24 | }], 25 | }), 26 | } 27 | } 28 | 29 | fn fetch(input: CallToolRequest) -> Result { 30 | let args = input.params.arguments.unwrap_or_default(); 31 | if let Some(Value::String(url)) = args.get("url") { 32 | // Create HTTP request 33 | let mut req = HttpRequest { 34 | url: url.clone(), 35 | headers: BTreeMap::new(), 36 | method: Some("GET".to_string()), 37 | }; 38 | 39 | // Add a user agent header to be polite 40 | req.headers 41 | .insert("User-Agent".to_string(), "fetch-tool/1.0".to_string()); 42 | 43 | // Perform the request 44 | let res = http::request::<()>(&req, None)?; 45 | 46 | // Convert response body to string 47 | let body = res.body(); 48 | let html = String::from_utf8_lossy(body.as_slice()); 49 | 50 | let converter = HtmlToMarkdown::builder() 51 | .skip_tags(vec!["script", "style"]) 52 | .build(); 53 | 54 | // Convert HTML to markdown 55 | match converter.convert(&html) { 56 | Ok(markdown) => Ok(CallToolResult { 57 | is_error: None, 58 | content: vec![Content { 59 | annotations: None, 60 | text: Some(markdown), 61 | mime_type: Some("text/markdown".to_string()), 62 | r#type: ContentType::Text, 63 | data: None, 64 | }], 65 | }), 66 | Err(e) => Ok(CallToolResult { 67 | is_error: Some(true), 68 | content: vec![Content { 69 | annotations: None, 70 | text: Some(format!("Failed to convert HTML to markdown: {}", e)), 71 | mime_type: None, 72 | r#type: ContentType::Text, 73 | data: None, 74 | }], 75 | }), 76 | } 77 | } else { 78 | Ok(CallToolResult { 79 | is_error: Some(true), 80 | content: vec![Content { 81 | annotations: None, 82 | text: Some("Please provide a url".into()), 83 | mime_type: 
None, 84 | r#type: ContentType::Text, 85 | data: None, 86 | }], 87 | }) 88 | } 89 | } 90 | 91 | pub(crate) fn describe() -> Result { 92 | Ok(ListToolsResult{ 93 | tools: vec![ 94 | ToolDescription { 95 | name: "fetch".into(), 96 | description: "Enables to open and access arbitrary text URLs. Fetches the contents of a URL and returns its contents converted to markdown".into(), 97 | input_schema: json!({ 98 | "type": "object", 99 | "properties": { 100 | "url": { 101 | "type": "string", 102 | "description": "The URL to fetch", 103 | }, 104 | }, 105 | "required": ["url"], 106 | }) 107 | .as_object() 108 | .unwrap() 109 | .clone(), 110 | }, 111 | ], 112 | }) 113 | } 114 | -------------------------------------------------------------------------------- /examples/plugins/fs/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/fs/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/fs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "fs" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64-serde = "0.7" 15 | base64 = "0.21" 16 | -------------------------------------------------------------------------------- /examples/plugins/fs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/fs/README.md: -------------------------------------------------------------------------------- 1 | # fs 2 | 3 | An example plugin that implement filesystem operations. 
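The concrete tool names and arguments are defined by the plugin's `describe()` output, which is not reproduced here; purely as a hypothetical sketch of what a call could look like (the `read_file` tool and `path` argument below are invented for illustration), using the same call format as the other plugin READMEs:

```json
{
  // hypothetical tool name and argument — check the plugin's describe() output for the real schema
  "name": "read_file",
  "params": {
    "path": "/tmp/notes.txt"
  }
}
```

Any paths the plugin touches presumably need to fall under the `allowed_paths` configured below.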
4 | 5 | ## Usage 6 | 7 | ```json 8 | { 9 | "plugins": [ 10 | { 11 | "name": "fs", 12 | "path": "oci://ghcr.io/tuananh/fs-plugin:latest", 13 | "runtime_config": { 14 | "allowed_paths": ["/tmp"] 15 | } 16 | } 17 | ] 18 | } 19 | 20 | ``` 21 | -------------------------------------------------------------------------------- /examples/plugins/github/.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | -------------------------------------------------------------------------------- /examples/plugins/github/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tinygo/tinygo:0.37.0 AS builder 2 | 3 | WORKDIR /workspace 4 | COPY go.mod . 5 | COPY go.sum . 6 | RUN go mod download 7 | COPY . . 8 | RUN tinygo build -target wasi -o plugin.wasm . 9 | 10 | FROM scratch 11 | WORKDIR / 12 | COPY --from=builder /workspace/plugin.wasm /plugin.wasm 13 | -------------------------------------------------------------------------------- /examples/plugins/github/README.md: -------------------------------------------------------------------------------- 1 | # github 2 | 3 | [src](https://github.com/dylibso/mcp.run-servlets/tree/main/servlets/github) 4 | 5 | You can interact with GitHub via various tools available in this plugin: branches, repo, gist, issues, files, etc... 6 | 7 | ## Usage 8 | 9 | ```json 10 | { 11 | "plugins": [ 12 | { 13 | "name": "github", 14 | "path": "oci://ghcr.io/tuananh/github-plugin:latest", 15 | "runtime_config": { 16 | "allowed_hosts": [ 17 | "api.github.com" 18 | ], 19 | "env_vars": { 20 | "api-key": "ghp_xxxx" 21 | } 22 | } 23 | } 24 | ] 25 | } 26 | ``` -------------------------------------------------------------------------------- /examples/plugins/github/gists.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | 7 | "github.com/extism/go-pdk" 8 | ) 9 | 10 | var ( 11 | CreateGistTool = ToolDescription{ 12 | Name: "gh-create-gist", 13 | Description: "Create a GitHub Gist", 14 | InputSchema: schema{ 15 | "type": "object", 16 | "properties": props{ 17 | "description": prop("string", "Description of the gist"), 18 | "files": SchemaProperty{ 19 | Type: "object", 20 | Description: "Files contained in the gist.", 21 | AdditionalProperties: &schema{ 22 | "type": "object", 23 | "properties": schema{ 24 | "content": schema{ 25 | "type": "string", 26 | "description": "Content of the file", 27 | }, 28 | }, 29 | "required": []string{"content"}, 30 | }, 31 | }, 32 | }, 33 | "required": []string{"files"}, 34 | }, 35 | } 36 | GetGistTool = ToolDescription{ 37 | Name: "gh-get-gist", 38 | Description: "Gets a specified gist.", 39 | InputSchema: schema{ 40 | "type": "object", 41 | "properties": props{ 42 | "gist_id": prop("string", "The unique identifier of the gist."), 43 | }, 44 | "required": []string{"gist_id"}, 45 | }, 46 | } 47 | UpdateGistTool = ToolDescription{ 48 | Name: "gh-update-gist", 49 | Description: "Lists pull requests in a specified repository. 
Supports different response formats via accept parameter.", 50 | InputSchema: schema{ 51 | "type": "object", 52 | "properties": props{ 53 | "gist_id": prop("string", "The unique identifier of the gist."), 54 | "description": prop("string", "Description of the gist"), 55 | "files": SchemaProperty{ 56 | Type: "object", 57 | Description: "Files contained in the gist.", 58 | AdditionalProperties: &schema{ 59 | "type": "object", 60 | "properties": schema{ 61 | "content": schema{ 62 | "type": "string", 63 | "description": "Content of the file", 64 | }, 65 | }, 66 | "required": []string{"content"}, 67 | }, 68 | }, 69 | }, 70 | "required": []string{"gist_id"}, 71 | }, 72 | } 73 | DeleteGistTool = ToolDescription{ 74 | Name: "gh-delete-gist", 75 | Description: "Delete a specified gist.", 76 | InputSchema: schema{ 77 | "type": "object", 78 | "properties": props{ 79 | "gist_id": prop("string", "The unique identifier of the gist."), 80 | }, 81 | "required": []string{"gist_id"}, 82 | }, 83 | } 84 | ) 85 | 86 | var GistTools = []ToolDescription{ 87 | CreateGistTool, 88 | GetGistTool, 89 | UpdateGistTool, 90 | DeleteGistTool, 91 | } 92 | 93 | func gistCreate(apiKey, description string, files map[string]any) CallToolResult { 94 | url := "https://api.github.com/gists" 95 | req := pdk.NewHTTPRequest(pdk.MethodPost, url) 96 | req.SetHeader("Authorization", fmt.Sprintf("token %s", apiKey)) 97 | req.SetHeader("Content-Type", "application/json") 98 | req.SetHeader("Accept", "application/vnd.github+json") 99 | req.SetHeader("User-Agent", "github-mcpx-servlet") 100 | 101 | data := map[string]any{ 102 | "description": description, 103 | "files": files, 104 | } 105 | res, err := json.Marshal(data) 106 | if err != nil { 107 | return CallToolResult{ 108 | IsError: some(true), 109 | Content: []Content{{ 110 | Type: ContentTypeText, 111 | Text: some(fmt.Sprintf("Failed to marshal gist data: %s", err)), 112 | }}, 113 | } 114 | } 115 | req.SetBody(res) 116 | resp := req.Send() 117 | if resp.Status() != 201 { 118 | return CallToolResult{ 119 | IsError: some(true), 120 | Content: []Content{{ 121 | Type: ContentTypeText, 122 | Text: some(fmt.Sprintf("Failed to create gist: %d %s", resp.Status(), string(resp.Body()))), 123 | }}, 124 | } 125 | } 126 | 127 | return CallToolResult{ 128 | Content: []Content{{ 129 | Type: ContentTypeText, 130 | Text: some(string(resp.Body())), 131 | }}, 132 | } 133 | } 134 | 135 | func gistUpdate(apiKey, gistId, description string, files map[string]any) CallToolResult { 136 | url := fmt.Sprintf("https://api.github.com/gists/%s", gistId) 137 | req := pdk.NewHTTPRequest(pdk.MethodPatch, url) 138 | req.SetHeader("Authorization", fmt.Sprintf("token %s", apiKey)) 139 | req.SetHeader("Content-Type", "application/json") 140 | req.SetHeader("Accept", "application/vnd.github+json") 141 | req.SetHeader("User-Agent", "github-mcpx-servlet") 142 | 143 | data := map[string]any{ 144 | "description": description, 145 | "files": files, 146 | } 147 | res, err := json.Marshal(data) 148 | if err != nil { 149 | return CallToolResult{ 150 | IsError: some(true), 151 | Content: []Content{{ 152 | Type: ContentTypeText, 153 | Text: some(fmt.Sprintf("Failed to marshal gist data: %s", err)), 154 | }}, 155 | } 156 | } 157 | req.SetBody(res) 158 | resp := req.Send() 159 | if resp.Status() != 201 { 160 | return CallToolResult{ 161 | IsError: some(true), 162 | Content: []Content{{ 163 | Type: ContentTypeText, 164 | Text: some(fmt.Sprintf("Failed to create gist: %d %s", resp.Status(), string(resp.Body()))), 165 | }}, 166 | } 167 
| } 168 | 169 | return CallToolResult{ 170 | Content: []Content{{ 171 | Type: ContentTypeText, 172 | Text: some(string(resp.Body())), 173 | }}, 174 | } 175 | } 176 | 177 | func gistGet(apiKey, gistId string) CallToolResult { 178 | url := fmt.Sprintf("https://api.github.com/gists/%s", gistId) 179 | req := pdk.NewHTTPRequest(pdk.MethodGet, url) 180 | req.SetHeader("Authorization", fmt.Sprintf("token %s", apiKey)) 181 | req.SetHeader("Content-Type", "application/json") 182 | req.SetHeader("Accept", "application/vnd.github+json") 183 | req.SetHeader("User-Agent", "github-mcpx-servlet") 184 | 185 | resp := req.Send() 186 | if resp.Status() != 201 { 187 | return CallToolResult{ 188 | IsError: some(true), 189 | Content: []Content{{ 190 | Type: ContentTypeText, 191 | Text: some(fmt.Sprintf("Failed to create branch: %d %s", resp.Status(), string(resp.Body()))), 192 | }}, 193 | } 194 | } 195 | 196 | return CallToolResult{ 197 | Content: []Content{{ 198 | Type: ContentTypeText, 199 | Text: some(string(resp.Body())), 200 | }}, 201 | } 202 | } 203 | 204 | func gistDelete(apiKey, gistId string) CallToolResult { 205 | url := fmt.Sprintf("https://api.github.com/gists/%s", gistId) 206 | req := pdk.NewHTTPRequest(pdk.MethodDelete, url) 207 | req.SetHeader("Authorization", fmt.Sprintf("token %s", apiKey)) 208 | req.SetHeader("Content-Type", "application/json") 209 | req.SetHeader("Accept", "application/vnd.github+json") 210 | req.SetHeader("User-Agent", "github-mcpx-servlet") 211 | 212 | resp := req.Send() 213 | if resp.Status() != 201 { 214 | return CallToolResult{ 215 | IsError: some(true), 216 | Content: []Content{{ 217 | Type: ContentTypeText, 218 | Text: some(fmt.Sprintf("Failed to create branch: %d %s", resp.Status(), string(resp.Body()))), 219 | }}, 220 | } 221 | } 222 | 223 | return CallToolResult{ 224 | Content: []Content{{ 225 | Type: ContentTypeText, 226 | Text: some(string(resp.Body())), 227 | }}, 228 | } 229 | } 230 | -------------------------------------------------------------------------------- /examples/plugins/github/go.mod: -------------------------------------------------------------------------------- 1 | module github 2 | 3 | go 1.24 4 | 5 | require github.com/extism/go-pdk v1.1.3 6 | -------------------------------------------------------------------------------- /examples/plugins/github/go.sum: -------------------------------------------------------------------------------- 1 | github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ= 2 | github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4= 3 | -------------------------------------------------------------------------------- /examples/plugins/github/main.go: -------------------------------------------------------------------------------- 1 | // Note: run `go doc -all` in this package to see all of the types and functions available. 2 | // ./pdk.gen.go contains the domain types from the host where your plugin will run. 3 | package main 4 | 5 | import ( 6 | "fmt" 7 | 8 | "github.com/extism/go-pdk" 9 | ) 10 | 11 | // Called when the tool is invoked. 12 | // If you support multiple tools, you must switch on the input.params.name to detect which tool is being called. 13 | // The name will match one of the tool names returned from "describe". 
14 | // It takes CallToolRequest as input (The incoming tool request from the LLM) 15 | // And returns CallToolResult (The servlet's response to the given tool call) 16 | func Call(input CallToolRequest) (CallToolResult, error) { 17 | apiKey, ok := pdk.GetConfig("api-key") 18 | if !ok { 19 | return CallToolResult{ 20 | IsError: some(true), 21 | Content: []Content{{ 22 | Type: ContentTypeText, 23 | Text: some("No api-key configured"), 24 | }}, 25 | }, nil 26 | } 27 | args := input.Params.Arguments.(map[string]interface{}) 28 | pdk.Log(pdk.LogDebug, fmt.Sprint("Args: ", args)) 29 | switch input.Params.Name { 30 | case ListIssuesTool.Name: 31 | owner, _ := args["owner"].(string) 32 | repo, _ := args["repo"].(string) 33 | return issueList(apiKey, owner, repo, args) 34 | case GetIssueTool.Name: 35 | owner, _ := args["owner"].(string) 36 | repo, _ := args["repo"].(string) 37 | issue, _ := args["issue"].(float64) 38 | return issueGet(apiKey, owner, repo, int(issue)) 39 | case AddIssueCommentTool.Name: 40 | owner, _ := args["owner"].(string) 41 | repo, _ := args["repo"].(string) 42 | issue, _ := args["issue"].(float64) 43 | body, _ := args["body"].(string) 44 | return issueAddComment(apiKey, owner, repo, int(issue), body) 45 | case CreateIssueTool.Name: 46 | owner, _ := args["owner"].(string) 47 | repo, _ := args["repo"].(string) 48 | data := issueFromArgs(args) 49 | return issueCreate(apiKey, owner, repo, data) 50 | case UpdateIssueTool.Name: 51 | owner, _ := args["owner"].(string) 52 | repo, _ := args["repo"].(string) 53 | issue, _ := args["issue"].(float64) 54 | data := issueFromArgs(args) 55 | return issueUpdate(apiKey, owner, repo, int(issue), data) 56 | 57 | case GetFileContentsTool.Name: 58 | owner, _ := args["owner"].(string) 59 | repo, _ := args["repo"].(string) 60 | path, _ := args["path"].(string) 61 | branch, _ := args["branch"].(string) 62 | res := filesGetContents(apiKey, owner, repo, path, &branch) 63 | return res, nil 64 | case CreateOrUpdateFileTool.Name: 65 | owner, _ := args["owner"].(string) 66 | repo, _ := args["repo"].(string) 67 | path, _ := args["path"].(string) 68 | file := fileCreateFromArgs(args) 69 | return filesCreateOrUpdate(apiKey, owner, repo, path, file) 70 | 71 | case CreateBranchTool.Name: 72 | owner, _ := args["owner"].(string) 73 | repo, _ := args["repo"].(string) 74 | from, _ := args["branch"].(string) 75 | var maybeBranch *string 76 | if branch, ok := args["from_branch"].(string); ok { 77 | maybeBranch = &branch 78 | } 79 | return branchCreate(apiKey, owner, repo, from, maybeBranch), nil 80 | 81 | case ListPullRequestsTool.Name: 82 | owner, _ := args["owner"].(string) 83 | repo, _ := args["repo"].(string) 84 | return pullRequestList(apiKey, owner, repo, args) 85 | 86 | case CreatePullRequestTool.Name: 87 | owner, _ := args["owner"].(string) 88 | repo, _ := args["repo"].(string) 89 | pr := branchPullRequestSchemaFromArgs(args) 90 | return branchCreatePullRequest(apiKey, owner, repo, pr), nil 91 | 92 | case PushFilesTool.Name: 93 | owner, _ := args["owner"].(string) 94 | repo, _ := args["repo"].(string) 95 | branch, _ := args["branch"].(string) 96 | message, _ := args["message"].(string) 97 | files := filePushFromArgs(args) 98 | return filesPush(apiKey, owner, repo, branch, message, files), nil 99 | 100 | case ListReposTool.Name: 101 | owner, _ := args["owner"].(string) 102 | return reposList(apiKey, owner, args) 103 | 104 | case GetRepositoryCollaboratorsTool.Name: 105 | owner, _ := args["owner"].(string) 106 | repo, _ := args["repo"].(string) 107 | return 
reposGetCollaborators(apiKey, owner, repo, args) 108 | 109 | case GetRepositoryContributorsTool.Name: 110 | owner, _ := args["owner"].(string) 111 | repo, _ := args["repo"].(string) 112 | return reposGetContributors(apiKey, owner, repo, args) 113 | 114 | case GetRepositoryDetailsTool.Name: 115 | owner, _ := args["owner"].(string) 116 | repo, _ := args["repo"].(string) 117 | return reposGetDetails(apiKey, owner, repo) 118 | 119 | case CreateGistTool.Name: 120 | description, _ := args["description"].(string) 121 | files, _ := args["files"].(map[string]any) 122 | return gistCreate(apiKey, description, files), nil 123 | 124 | case GetGistTool.Name: 125 | gistId, _ := args["gist_id"].(string) 126 | return gistGet(apiKey, gistId), nil 127 | 128 | case UpdateGistTool.Name: 129 | gistId, _ := args["gist_id"].(string) 130 | description, _ := args["description"].(string) 131 | files, _ := args["files"].(map[string]any) 132 | return gistUpdate(apiKey, gistId, description, files), nil 133 | 134 | case DeleteGistTool.Name: 135 | gistId, _ := args["gist_id"].(string) 136 | return gistDelete(apiKey, gistId), nil 137 | 138 | default: 139 | return CallToolResult{ 140 | IsError: some(true), 141 | Content: []Content{{ 142 | Type: ContentTypeText, 143 | Text: some("Unknown tool " + input.Params.Name), 144 | }}, 145 | }, nil 146 | } 147 | 148 | } 149 | 150 | func Describe() (ListToolsResult, error) { 151 | toolsets := [][]ToolDescription{ 152 | IssueTools, 153 | FileTools, 154 | BranchTools, 155 | RepoTools, 156 | GistTools, 157 | } 158 | 159 | tools := []ToolDescription{} 160 | 161 | for _, toolset := range toolsets { 162 | tools = append(tools, toolset...) 163 | } 164 | 165 | // Ensure each tool's InputSchema has a required field 166 | for i := range tools { 167 | // Check if InputSchema is a map[string]interface{} 168 | if schema, ok := tools[i].InputSchema.(map[string]interface{}); ok { 169 | // Check if required field is missing 170 | if _, exists := schema["required"]; !exists { 171 | // Add an empty required array if it doesn't exist 172 | schema["required"] = []string{} 173 | tools[i].InputSchema = schema 174 | } 175 | } 176 | } 177 | 178 | return ListToolsResult{ 179 | Tools: tools, 180 | }, nil 181 | } 182 | 183 | func some[T any](t T) *T { 184 | return &t 185 | } 186 | 187 | type SchemaProperty struct { 188 | Type string `json:"type"` 189 | Description string `json:"description,omitempty"` 190 | AdditionalProperties *schema `json:"additionalProperties,omitempty"` 191 | Items *schema `json:"items,omitempty"` 192 | } 193 | 194 | func prop(tpe, description string) SchemaProperty { 195 | return SchemaProperty{Type: tpe, Description: description} 196 | } 197 | 198 | func arrprop(tpe, description, itemstpe string) SchemaProperty { 199 | items := schema{"type": itemstpe} 200 | return SchemaProperty{Type: tpe, Description: description, Items: &items} 201 | } 202 | 203 | type schema = map[string]interface{} 204 | type props = map[string]SchemaProperty 205 | -------------------------------------------------------------------------------- /examples/plugins/github/pdk.gen.go: -------------------------------------------------------------------------------- 1 | // THIS FILE WAS GENERATED BY `xtp-go-bindgen`. DO NOT EDIT. 
2 | package main 3 | 4 | import ( 5 | "errors" 6 | 7 | pdk "github.com/extism/go-pdk" 8 | ) 9 | 10 | //export call 11 | func _Call() int32 { 12 | var err error 13 | _ = err 14 | pdk.Log(pdk.LogDebug, "Call: getting JSON input") 15 | var input CallToolRequest 16 | err = pdk.InputJSON(&input) 17 | if err != nil { 18 | pdk.SetError(err) 19 | return -1 20 | } 21 | 22 | pdk.Log(pdk.LogDebug, "Call: calling implementation function") 23 | output, err := Call(input) 24 | if err != nil { 25 | pdk.SetError(err) 26 | return -1 27 | } 28 | 29 | pdk.Log(pdk.LogDebug, "Call: setting JSON output") 30 | err = pdk.OutputJSON(output) 31 | if err != nil { 32 | pdk.SetError(err) 33 | return -1 34 | } 35 | 36 | pdk.Log(pdk.LogDebug, "Call: returning") 37 | return 0 38 | } 39 | 40 | //export describe 41 | func _Describe() int32 { 42 | var err error 43 | _ = err 44 | output, err := Describe() 45 | if err != nil { 46 | pdk.SetError(err) 47 | return -1 48 | } 49 | 50 | pdk.Log(pdk.LogDebug, "Describe: setting JSON output") 51 | err = pdk.OutputJSON(output) 52 | if err != nil { 53 | pdk.SetError(err) 54 | return -1 55 | } 56 | 57 | pdk.Log(pdk.LogDebug, "Describe: returning") 58 | return 0 59 | } 60 | 61 | // 62 | type BlobResourceContents struct { 63 | // A base64-encoded string representing the binary data of the item. 64 | Blob string `json:"blob"` 65 | // The MIME type of this resource, if known. 66 | MimeType *string `json:"mimeType,omitempty"` 67 | // The URI of this resource. 68 | Uri string `json:"uri"` 69 | } 70 | 71 | // Used by the client to invoke a tool provided by the server. 72 | type CallToolRequest struct { 73 | Method *string `json:"method,omitempty"` 74 | Params Params `json:"params"` 75 | } 76 | 77 | // The server's response to a tool call. 78 | // 79 | // Any errors that originate from the tool SHOULD be reported inside the result 80 | // object, with `isError` set to true, _not_ as an MCP protocol-level error 81 | // response. Otherwise, the LLM would not be able to see that an error occurred 82 | // and self-correct. 83 | // 84 | // However, any errors in _finding_ the tool, an error indicating that the 85 | // server does not support tool calls, or any other exceptional conditions, 86 | // should be reported as an MCP error response. 87 | type CallToolResult struct { 88 | Content []Content `json:"content"` 89 | // Whether the tool call ended in an error. 90 | // 91 | // If not set, this is assumed to be false (the call was successful). 92 | IsError *bool `json:"isError,omitempty"` 93 | } 94 | 95 | // A content response. 96 | // For text content set type to ContentType.Text and set the `text` property 97 | // For image content set type to ContentType.Image and set the `data` and `mimeType` properties 98 | type Content struct { 99 | Annotations *TextAnnotation `json:"annotations,omitempty"` 100 | // The base64-encoded image data. 101 | Data *string `json:"data,omitempty"` 102 | // The MIME type of the image. Different providers may support different image types. 103 | MimeType *string `json:"mimeType,omitempty"` 104 | // The text content of the message. 
105 | Text *string `json:"text,omitempty"` 106 | Type ContentType `json:"type"` 107 | } 108 | 109 | // 110 | type ContentType string 111 | 112 | const ( 113 | ContentTypeText ContentType = "text" 114 | ContentTypeImage ContentType = "image" 115 | ContentTypeResource ContentType = "resource" 116 | ) 117 | 118 | func (v ContentType) String() string { 119 | switch v { 120 | case ContentTypeText: 121 | return `text` 122 | case ContentTypeImage: 123 | return `image` 124 | case ContentTypeResource: 125 | return `resource` 126 | default: 127 | return "" 128 | } 129 | } 130 | 131 | func stringToContentType(s string) (ContentType, error) { 132 | switch s { 133 | case `text`: 134 | return ContentTypeText, nil 135 | case `image`: 136 | return ContentTypeImage, nil 137 | case `resource`: 138 | return ContentTypeResource, nil 139 | default: 140 | return ContentType(""), errors.New("unable to convert string to ContentType") 141 | } 142 | } 143 | 144 | // Provides one or more descriptions of the tools available in this servlet. 145 | type ListToolsResult struct { 146 | // The list of ToolDescription objects provided by this servlet. 147 | Tools []ToolDescription `json:"tools"` 148 | } 149 | 150 | // 151 | type Params struct { 152 | Arguments interface{} `json:"arguments,omitempty"` 153 | Name string `json:"name"` 154 | } 155 | 156 | // The sender or recipient of messages and data in a conversation. 157 | type Role string 158 | 159 | const ( 160 | RoleAssistant Role = "assistant" 161 | RoleUser Role = "user" 162 | ) 163 | 164 | func (v Role) String() string { 165 | switch v { 166 | case RoleAssistant: 167 | return `assistant` 168 | case RoleUser: 169 | return `user` 170 | default: 171 | return "" 172 | } 173 | } 174 | 175 | func stringToRole(s string) (Role, error) { 176 | switch s { 177 | case `assistant`: 178 | return RoleAssistant, nil 179 | case `user`: 180 | return RoleUser, nil 181 | default: 182 | return Role(""), errors.New("unable to convert string to Role") 183 | } 184 | } 185 | 186 | // A text annotation 187 | type TextAnnotation struct { 188 | // Describes who the intended customer of this object or data is. 189 | // 190 | // It can include multiple entries to indicate content useful for multiple audiences (e.g., `["user", "assistant"]`). 191 | Audience []Role `json:"audience,omitempty"` 192 | // Describes how important this data is for operating the server. 193 | // 194 | // A value of 1 means "most important," and indicates that the data is 195 | // effectively required, while 0 means "least important," and indicates that 196 | // the data is entirely optional. 197 | Priority float32 `json:"priority,omitempty"` 198 | } 199 | 200 | // 201 | type TextResourceContents struct { 202 | // The MIME type of this resource, if known. 203 | MimeType *string `json:"mimeType,omitempty"` 204 | // The text of the item. This must only be set if the item can actually be represented as text (not binary data). 205 | Text string `json:"text"` 206 | // The URI of this resource. 207 | Uri string `json:"uri"` 208 | } 209 | 210 | // Describes the capabilities and expected paramters of the tool function 211 | type ToolDescription struct { 212 | // A description of the tool 213 | Description string `json:"description"` 214 | // The JSON schema describing the argument input 215 | InputSchema interface{} `json:"inputSchema"` 216 | // The name of the tool. It should match the plugin / binding name. 
217 | Name string `json:"name"` 218 | } 219 | 220 | // Note: leave this in place, as the Go compiler will find the `export` function as the entrypoint. 221 | func main() {} 222 | -------------------------------------------------------------------------------- /examples/plugins/gitlab/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/gitlab/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/gitlab/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "gitlab" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64-serde = "0.7" 15 | base64 = "0.21" 16 | urlencoding = "2.1" 17 | url = "2.5" 18 | termtree = "0.5.1" 19 | -------------------------------------------------------------------------------- /examples/plugins/gitlab/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/gitlab/README.md: -------------------------------------------------------------------------------- 1 | # gitlab 2 | 3 | A plugin that implements GitLab operations including issue management, file handling, branch management, and snippet operations. 4 | 5 | ## Configuration 6 | 7 | The plugin requires the following configuration: 8 | 9 | - `GITLAB_TOKEN`: (Required) Your GitLab personal access token 10 | - `GITLAB_URL`: (Optional) Your GitLab instance URL. 
Defaults to `https://gitlab.com/api/v4` 11 | 12 | ## Usage 13 | 14 | ```json 15 | { 16 | "plugins": [ 17 | { 18 | "name": "gitlab", 19 | "path": "oci://ghcr.io/tuananh/gitlab-plugin:latest", 20 | "runtime_config": { 21 | "allowed_hosts": ["gitlab.com"], // Your GitLab host 22 | "env_vars": { 23 | "GITLAB_TOKEN": "your-gitlab-token", 24 | "GITLAB_URL": "https://gitlab.com/api/v4" // Optional, defaults to GitLab.com 25 | } 26 | } 27 | } 28 | ] 29 | } 30 | ``` 31 | 32 | ## Available Operations 33 | 34 | ### Issues 35 | - [x] `gl_create_issue`: Create a new issue 36 | - [x] `gl_get_issue`: Get issue details 37 | - [x] `gl_update_issue`: Update an existing issue 38 | - [x] `gl_add_issue_comment`: Add a comment to an issue 39 | - [x] `gl_list_issues`: List issues for a project in GitLab. Supports filtering by state and labels. 40 | 41 | ### Files 42 | - [x] `gl_get_file_contents`: Get file contents 43 | - [x] `gl_create_or_update_file`: Create or update a file 44 | - [x] `gl_delete_file`: Delete a file from the repository 45 | - [ ] `gl_push_files`: Push multiple files 46 | 47 | ### Branches and Merge Requests 48 | - [x] `gl_create_branch`: Create a new branch 49 | - [x] `gl_list_branches`: List all branches in a GitLab project 50 | - [x] `gl_create_merge_request`: Create a merge request 51 | - [x] `gl_update_merge_request`: Update an existing merge request in a GitLab project. 52 | - [x] `gl_get_merge_request`: Get details of a specific merge request in a GitLab project. 53 | 54 | ### Snippets 55 | - [x] `gl_create_snippet`: Create a new snippet 56 | - [x] `gl_update_snippet`: Update an existing snippet 57 | - [x] `gl_get_snippet`: Get snippet details 58 | - [x] `gl_delete_snippet`: Delete a snippet 59 | 60 | ### Repository 61 | - [x] `gl_get_repo_tree`: Get the list of files and directories in a project repository. Handles pagination internally. 62 | - [x] `gl_get_repo_members`: Get a list of members for a GitLab project. Supports fetching direct or inherited members and filtering by query. Handles pagination internally. 63 | -------------------------------------------------------------------------------- /examples/plugins/gomodule/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/gomodule/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/gomodule/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "gomodule" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64-serde = "0.7" 15 | base64 = "0.21" 16 | -------------------------------------------------------------------------------- /examples/plugins/gomodule/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/gomodule/README.md: -------------------------------------------------------------------------------- 1 | # gomodule 2 | 3 | A plugin that fetches Go module information and latest versions from `proxy.golang.org`. 4 | 5 | ## What it does 6 | 7 | Provides two main functionalities: 8 | 1. `go_module_latest_version`: Fetches the latest version of multiple Go modules 9 | 2. `go_module_info`: Fetches detailed information about multiple Go modules 10 | 11 | ## Usage 12 | 13 | Call with: 14 | ```json 15 | { 16 | "plugins": [ 17 | { 18 | "name": "gomodule", 19 | "path": "oci://ghcr.io/tuananh/gomodule-plugin:latest", 20 | "runtime_config": { 21 | "allowed_hosts": ["proxy.golang.org"] 22 | } 23 | } 24 | ] 25 | } 26 | ``` 27 | 28 | ### Example Usage 29 | 30 | 1. Get latest version of multiple Go modules: 31 | ```json 32 | { 33 | "name": "go_module_latest_version", 34 | "params": { 35 | "module_names": "github.com/spf13/cobra,github.com/gorilla/mux,github.com/gin-gonic/gin" 36 | } 37 | } 38 | ``` 39 | 40 | 2. 
Get detailed information about multiple Go modules: 41 | ```json 42 | { 43 | "name": "go_module_info", 44 | "params": { 45 | "module_names": "github.com/spf13/cobra,github.com/gorilla/mux,github.com/gin-gonic/gin" 46 | } 47 | } 48 | ``` 49 | 50 | Returns: 51 | - For `go_module_latest_version`: A JSON object mapping module names to their latest version numbers 52 | - For `go_module_info`: An array of JSON objects containing detailed module information for each module, including: 53 | - Name 54 | - Latest version 55 | - Time 56 | - Version 57 | - And other metadata from proxy.golang.org -------------------------------------------------------------------------------- /examples/plugins/gomodule/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use std::collections::BTreeMap; 4 | 5 | use extism_pdk::*; 6 | use json::Value; 7 | use pdk::types::{ 8 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 9 | }; 10 | use serde_json::json; 11 | 12 | pub(crate) fn call(input: CallToolRequest) -> Result { 13 | match input.params.name.as_str() { 14 | "gomodule_latest_version" => latest_version(input), 15 | "gomodule_info" => module_info(input), 16 | _ => Ok(CallToolResult { 17 | is_error: Some(true), 18 | content: vec![Content { 19 | annotations: None, 20 | text: Some(format!("Unknown tool: {}", input.params.name)), 21 | mime_type: None, 22 | r#type: ContentType::Text, 23 | data: None, 24 | }], 25 | }), 26 | } 27 | } 28 | 29 | fn module_info(input: CallToolRequest) -> Result { 30 | let args = input.params.arguments.unwrap_or_default(); 31 | if let Some(Value::String(module_names)) = args.get("module_names") { 32 | let module_names: Vec<&str> = module_names.split(',').map(|s| s.trim()).collect(); 33 | let mut results = Vec::new(); 34 | 35 | for module_name in module_names { 36 | let mut req = HttpRequest { 37 | url: format!("https://proxy.golang.org/{}/@latest", module_name), 38 | headers: BTreeMap::new(), 39 | method: Some("GET".to_string()), 40 | }; 41 | 42 | req.headers 43 | .insert("User-Agent".to_string(), "hyper-mcp/1.0".to_string()); 44 | 45 | let res = http::request::<()>(&req, None)?; 46 | 47 | let body = res.body(); 48 | let json_str = String::from_utf8_lossy(body.as_slice()); 49 | 50 | let json: serde_json::Value = serde_json::from_str(&json_str)?; 51 | 52 | // TODO: figure out how to get module license 53 | results.push(json); 54 | } 55 | 56 | if !results.is_empty() { 57 | Ok(CallToolResult { 58 | is_error: None, 59 | content: vec![Content { 60 | annotations: None, 61 | text: Some(serde_json::to_string(&results)?), 62 | mime_type: Some("text/plain".to_string()), 63 | r#type: ContentType::Text, 64 | data: None, 65 | }], 66 | }) 67 | } else { 68 | Ok(CallToolResult { 69 | is_error: Some(true), 70 | content: vec![Content { 71 | annotations: None, 72 | text: Some("Failed to get module information".into()), 73 | mime_type: None, 74 | r#type: ContentType::Text, 75 | data: None, 76 | }], 77 | }) 78 | } 79 | } else { 80 | Ok(CallToolResult { 81 | is_error: Some(true), 82 | content: vec![Content { 83 | annotations: None, 84 | text: Some("Please provide module names".into()), 85 | mime_type: None, 86 | r#type: ContentType::Text, 87 | data: None, 88 | }], 89 | }) 90 | } 91 | } 92 | 93 | fn latest_version(input: CallToolRequest) -> Result { 94 | let args = input.params.arguments.unwrap_or_default(); 95 | if let Some(Value::String(module_names)) = args.get("module_names") { 96 | let module_names: Vec<&str> 
= module_names.split(',').map(|s| s.trim()).collect(); 97 | let mut results = BTreeMap::new(); 98 | 99 | for module_name in module_names { 100 | let mut req = HttpRequest { 101 | url: format!("https://proxy.golang.org/{}/@latest", module_name), 102 | headers: BTreeMap::new(), 103 | method: Some("GET".to_string()), 104 | }; 105 | 106 | req.headers 107 | .insert("User-Agent".to_string(), "hyper-mcp/1.0".to_string()); 108 | 109 | let res = http::request::<()>(&req, None)?; 110 | 111 | let body = res.body(); 112 | let json_str = String::from_utf8_lossy(body.as_slice()); 113 | 114 | let json: serde_json::Value = serde_json::from_str(&json_str)?; 115 | 116 | if let Some(version) = json["Version"].as_str() { 117 | results.insert(module_name.to_string(), version.to_string()); 118 | } 119 | } 120 | 121 | if !results.is_empty() { 122 | Ok(CallToolResult { 123 | is_error: None, 124 | content: vec![Content { 125 | annotations: None, 126 | text: Some(serde_json::to_string(&results)?), 127 | mime_type: Some("text/plain".to_string()), 128 | r#type: ContentType::Text, 129 | data: None, 130 | }], 131 | }) 132 | } else { 133 | Ok(CallToolResult { 134 | is_error: Some(true), 135 | content: vec![Content { 136 | annotations: None, 137 | text: Some("Failed to get latest versions".into()), 138 | mime_type: None, 139 | r#type: ContentType::Text, 140 | data: None, 141 | }], 142 | }) 143 | } 144 | } else { 145 | Ok(CallToolResult { 146 | is_error: Some(true), 147 | content: vec![Content { 148 | annotations: None, 149 | text: Some("Please provide module names".into()), 150 | mime_type: None, 151 | r#type: ContentType::Text, 152 | data: None, 153 | }], 154 | }) 155 | } 156 | } 157 | 158 | pub(crate) fn describe() -> Result { 159 | Ok(ListToolsResult { 160 | tools: vec![ 161 | ToolDescription { 162 | name: "gomodule_latest_version".into(), 163 | description: "Fetches the latest version of multiple Go modules. Assume it's github.com if not specified".into(), 164 | input_schema: json!({ 165 | "type": "object", 166 | "properties": { 167 | "module_names": { 168 | "type": "string", 169 | "description": "Comma-separated list of Go module names to get the latest versions for", 170 | }, 171 | }, 172 | "required": ["module_names"], 173 | }) 174 | .as_object() 175 | .unwrap() 176 | .clone(), 177 | }, 178 | ToolDescription { 179 | name: "gomodule_info".into(), 180 | description: "Fetches detailed information about multiple Go modules. 
Assume it's github.com if not specified".into(), 181 | input_schema: json!({ 182 | "type": "object", 183 | "properties": { 184 | "module_names": { 185 | "type": "string", 186 | "description": "Comma-separated list of Go module names to get information for", 187 | }, 188 | }, 189 | "required": ["module_names"], 190 | }) 191 | .as_object() 192 | .unwrap() 193 | .clone(), 194 | }, 195 | ], 196 | }) 197 | } 198 | -------------------------------------------------------------------------------- /examples/plugins/hash/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /examples/plugins/hash/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hash" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64 = "0.21" 15 | base64-serde = "0.8.0" 16 | sha2 = "0.10" 17 | md5 = "0.7" 18 | sha1 = "0.10" 19 | base32 = "0.4" 20 | -------------------------------------------------------------------------------- /examples/plugins/hash/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/hash/README.md: -------------------------------------------------------------------------------- 1 | # hash 2 | 3 | A hyper-mcp plugin that generates do all kind of hashes for you. 4 | 5 | Supported hashing tools: 6 | 7 | - [x] base64 8 | - [x] base32 9 | - [x] sha256 10 | - [x] sha512 11 | - [x] md5 12 | - [x] sha1 13 | - [x] sha224 14 | - [x] sha384 15 | 16 | ## What it does 17 | 18 | Takes input text and hash it. 19 | -------------------------------------------------------------------------------- /examples/plugins/hash/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use base64::Engine; 4 | use extism_pdk::*; 5 | use pdk::types::*; 6 | use serde_json::json; 7 | use sha1::Sha1; 8 | use sha2::{Digest, Sha224, Sha256, Sha384, Sha512}; 9 | 10 | // Called when the tool is invoked. 
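// Expects `arguments` of the form {"data": "<input text>", "algorithm": "sha256" | "sha512" | "sha384" | "sha224" | "sha1" | "md5" | "base32" | "base64"};
// see describe() below for the full input schema.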
11 | pub(crate) fn call(input: CallToolRequest) -> Result { 12 | extism_pdk::log!( 13 | LogLevel::Info, 14 | "called with args: {:?}", 15 | input.params.arguments 16 | ); 17 | let args = input.params.arguments.unwrap_or_default(); 18 | 19 | let data = match args.get("data") { 20 | Some(v) => v.as_str().unwrap(), 21 | None => return Err(Error::msg("`data` is required")), 22 | }; 23 | 24 | let algorithm = match args.get("algorithm") { 25 | Some(v) => v.as_str().unwrap(), 26 | None => return Err(Error::msg("`algorithm` is required")), 27 | }; 28 | 29 | let result = match algorithm { 30 | "sha256" => { 31 | let mut hasher = Sha256::new(); 32 | hasher.update(data.as_bytes()); 33 | format!("{:x}", hasher.finalize()) 34 | } 35 | "sha512" => { 36 | let mut hasher = Sha512::new(); 37 | hasher.update(data.as_bytes()); 38 | format!("{:x}", hasher.finalize()) 39 | } 40 | "sha384" => { 41 | let mut hasher = Sha384::new(); 42 | hasher.update(data.as_bytes()); 43 | format!("{:x}", hasher.finalize()) 44 | } 45 | "sha224" => { 46 | let mut hasher = Sha224::new(); 47 | hasher.update(data.as_bytes()); 48 | format!("{:x}", hasher.finalize()) 49 | } 50 | "sha1" => { 51 | let mut hasher = Sha1::new(); 52 | hasher.update(data.as_bytes()); 53 | format!("{:x}", hasher.finalize()) 54 | } 55 | "md5" => { 56 | format!("{:x}", md5::compute(data)) 57 | } 58 | "base32" => base32::encode(base32::Alphabet::RFC4648 { padding: true }, data.as_bytes()), 59 | "base64" | _ => base64::engine::general_purpose::STANDARD.encode(data), 60 | }; 61 | 62 | Ok(CallToolResult { 63 | is_error: None, 64 | content: vec![Content { 65 | annotations: None, 66 | text: Some(result), 67 | mime_type: Some("text/plain".into()), 68 | r#type: ContentType::Text, 69 | data: None, 70 | }], 71 | }) 72 | } 73 | 74 | pub(crate) fn describe() -> Result { 75 | Ok(ListToolsResult { 76 | tools: vec![ToolDescription { 77 | name: "hash".into(), 78 | description: "Hash data using various algorithms: sha256, sha512, sha384, sha224, sha1, md5, base32, base64".into(), 79 | input_schema: json!({ 80 | "type": "object", 81 | "properties": { 82 | "data": { 83 | "type": "string", 84 | "description": "data to convert to hash or encoded format" 85 | }, 86 | "algorithm": { 87 | "type": "string", 88 | "description": "algorithm to use for hashing or encoding", 89 | "enum": ["sha256", "sha512", "sha384", "sha224", "sha1", "md5", "base32", "base64"] 90 | } 91 | }, 92 | "required": ["data", "algorithm"] 93 | }).as_object().unwrap().clone(), 94 | }], 95 | }) 96 | } 97 | -------------------------------------------------------------------------------- /examples/plugins/meme-generator/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/meme-generator/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate 
JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/meme-generator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "meme-generator" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | ab_glyph = "0.2.29" 12 | image = "0.25.6" 13 | imageproc = "0.25.0" 14 | rusttype = "0.9.3" 15 | serde = { version = "1.0", features = ["derive"] } 16 | serde_json = "1.0" 17 | reqwest = { version = "0.11", features = ["blocking"] } 18 | serde_yaml = "0.9" 19 | extism-pdk = "=1.4.0" 20 | base64-serde = "0.7" 21 | base64 = "0.21" 22 | -------------------------------------------------------------------------------- /examples/plugins/meme-generator/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/meme-generator/README.md: -------------------------------------------------------------------------------- 1 | # meme-generator 2 | 3 | A plugin for generating memes using predefined templates with text overlays. 4 | 5 | ## What it does 6 | 7 | Generates memes by overlaying customized text on predefined meme templates in classic meme style. The plugin supports various text styles, alignments, and positioning based on template configurations. 8 | 9 | ## Usage 10 | 11 | Call with: 12 | ```json 13 | { 14 | "plugins": [ 15 | { 16 | "name": "meme-generator", 17 | "path": "oci://ghcr.io/tuananh/meme-generator-plugin:latest" 18 | } 19 | ] 20 | } 21 | ``` 22 | 23 | The plugin provides the following tools: 24 | 25 | ### meme_list_templates 26 | Lists all available meme templates. 27 | 28 | ### meme_get_template 29 | Gets details about a specific meme template. 30 | 31 | Parameters: 32 | - `template_id`: The ID of the template to retrieve 33 | 34 | ### meme_generate 35 | Generates a meme using a template and custom text. 36 | 37 | Parameters: 38 | - `template_id`: The ID of the template to use 39 | - `texts`: Array of text strings to place on the meme according to the template configuration 40 | 41 | Each template can have specific configurations for: 42 | - Text positioning and alignment 43 | - Font scaling and style (uppercase/normal) 44 | - Text color 45 | - Multiple text overlays 46 | 47 | The generated output is a PNG image with the text overlaid on the template according to the specified configuration. 
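### Example

A minimal, illustrative `meme_generate` call (the template ID and texts below are placeholders; list valid IDs with `meme_list_templates` first):

```json
{
  "name": "meme_generate",
  "params": {
    "template_id": "drake",
    "texts": ["writing memes by hand", "letting the plugin render them"]
  }
}
```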
-------------------------------------------------------------------------------- /examples/plugins/meme-generator/generate_embedded.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | def main(): 5 | # Read templates.json to get list of template IDs 6 | with open('templates.json', 'r') as f: 7 | templates = json.load(f) 8 | 9 | template_ids = [t['id'] for t in templates] 10 | 11 | # Start generating embedded.rs 12 | output = [] 13 | output.append('// Embed templates.json') 14 | output.append('pub const TEMPLATES_JSON: &str = include_str!("../templates.json");') 15 | output.append('') 16 | output.append('// Embed font data') 17 | output.append('pub const FONT_DATA: &[u8] = include_bytes!("../assets/fonts/TitilliumWeb-Black.ttf");') 18 | output.append('') 19 | output.append('// Function to get template config') 20 | output.append('pub fn get_template_config(template_id: &str) -> Option<&\'static str> {') 21 | output.append(' match template_id {') 22 | 23 | # Add template configs 24 | for template_id in template_ids: 25 | config_path = f'assets/templates/{template_id}/config.yml' 26 | if os.path.exists(config_path): 27 | output.append(f' "{template_id}" => Some(include_str!("../assets/templates/{template_id}/config.yml")),') 28 | 29 | output.append(' _ => None') 30 | output.append(' }') 31 | output.append('}') 32 | output.append('') 33 | 34 | # Add template images 35 | output.append('// Function to get template image') 36 | output.append('pub fn get_template_image(template_id: &str, image_name: &str) -> Option<&\'static [u8]> {') 37 | output.append(' match (template_id, image_name) {') 38 | 39 | for template_id in template_ids: 40 | template_dir = f'assets/templates/{template_id}' 41 | if os.path.exists(template_dir): 42 | for file in os.listdir(template_dir): 43 | if file.endswith(('.jpg', '.png', '.gif')): 44 | output.append(f' ("{template_id}", "{file}") => Some(include_bytes!("../assets/templates/{template_id}/{file}")),') 45 | 46 | output.append(' _ => None') 47 | output.append(' }') 48 | output.append('}') 49 | 50 | # Write output 51 | with open('src/embedded.rs', 'w') as f: 52 | f.write('\n'.join(output)) 53 | 54 | if __name__ == '__main__': 55 | main() -------------------------------------------------------------------------------- /examples/plugins/memory/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/memory/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. 
For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/memory/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "memory" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64 = "0.21" 15 | base64-serde = "0.8.0" 16 | uuid = { version = "1.16", features = ["v4", "serde"] } 17 | rusqlite = { version = "0.34.0", features = ["bundled"] } 18 | 19 | [build-dependencies] 20 | cc = "1.0" 21 | -------------------------------------------------------------------------------- /examples/plugins/memory/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | # Install wasi-sdk 8 | ENV WASI_OS=linux \ 9 | WASI_VERSION=25 \ 10 | WASI_VERSION_FULL=25.0 11 | 12 | # Detect architecture and set WASI_ARCH accordingly 13 | RUN apt-get update && apt-get install -y wget && \ 14 | ARCH=$(uname -m) && \ 15 | if [ "$ARCH" = "x86_64" ]; then \ 16 | export WASI_ARCH=x86_64; \ 17 | elif [ "$ARCH" = "aarch64" ]; then \ 18 | export WASI_ARCH=arm64; \ 19 | else \ 20 | echo "Unsupported architecture: $ARCH" && exit 1; \ 21 | fi && \ 22 | cd /opt && \ 23 | wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_VERSION}/wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 24 | tar xvf wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 25 | rm wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 26 | mv wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS} wasi-sdk 27 | 28 | WORKDIR /workspace 29 | COPY . . 30 | RUN cargo fetch 31 | ENV WASI_SDK_PATH=/opt/wasi-sdk 32 | ENV CC_wasm32_wasip1="${WASI_SDK_PATH}/bin/clang --sysroot=${WASI_SDK_PATH}/share/wasi-sysroot" 33 | 34 | RUN cargo auditable build --release --target wasm32-wasip1 35 | 36 | FROM scratch 37 | WORKDIR / 38 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 39 | -------------------------------------------------------------------------------- /examples/plugins/memory/README.md: -------------------------------------------------------------------------------- 1 | # memory 2 | 3 | A plugin that let you save & retrieve memory, backed by SQLite. 4 | 5 | ## Usage 6 | 7 | Call with: 8 | ```json 9 | { 10 | "plugins": [ 11 | // {}, 12 | { 13 | "name": "memory", 14 | "path": "/home/anh/Code/hyper-mcp/examples/plugins/memory/target/wasm32-wasip1/release/plugin.wasm", 15 | "runtime_config": { 16 | "allowed_paths": ["/tmp"], 17 | "env_vars": { 18 | "db_path": "/tmp/memory.db" 19 | } 20 | } 21 | } 22 | ] 23 | } 24 | 25 | ``` 26 | 27 | ## How to build 28 | 29 | This plugin requires you to have [wasi-sdk](https://github.com/WebAssembly/wasi-sdk) installed. 
30 | 31 | ```sh 32 | export WASI_SDK_PATH=`` # in my case, it's /opt/wasi-sdk 33 | export CC_wasm32_wasip1="${WASI_SDK_PATH}/bin/clang --sysroot=${WASI_SDK_PATH}/share/wasi-sysroot" 34 | cargo build --release --target wasm32-wasip1 35 | ``` 36 | 37 | See [Dockerfile](./Dockerfile) for reference. 38 | -------------------------------------------------------------------------------- /examples/plugins/memory/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use extism_pdk::*; 4 | use pdk::types::{ 5 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 6 | }; 7 | use rusqlite::{Connection, params}; 8 | use serde::{Deserialize, Serialize}; 9 | use serde_json::json; 10 | use std::sync::Once; 11 | use uuid::Uuid; 12 | 13 | static DB_INIT: Once = Once::new(); 14 | 15 | #[derive(Debug, Serialize, Deserialize)] 16 | struct Memory { 17 | id: String, 18 | content: String, 19 | } 20 | 21 | fn init_db(db_path: &str) -> Result<(), Error> { 22 | let conn = Connection::open_with_flags( 23 | db_path, 24 | rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE | rusqlite::OpenFlags::SQLITE_OPEN_CREATE, 25 | )?; 26 | 27 | conn.execute( 28 | "CREATE TABLE IF NOT EXISTS memories ( 29 | id TEXT PRIMARY KEY, 30 | content TEXT NOT NULL, 31 | created_at INTEGER DEFAULT (strftime('%s', 'now')) 32 | )", 33 | [], 34 | )?; 35 | 36 | Ok(()) 37 | } 38 | 39 | fn get_db_path() -> Result { 40 | config::get("db_path")? 41 | .ok_or_else(|| Error::msg("db_path configuration is required but not set")) 42 | } 43 | 44 | fn store_memory(content: &str, db_path: &str) -> Result { 45 | let conn = Connection::open_with_flags(db_path, rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE)?; 46 | let id = Uuid::new_v4().to_string(); 47 | 48 | conn.execute( 49 | "INSERT INTO memories (id, content) VALUES (?, ?)", 50 | params![id, content], 51 | )?; 52 | 53 | Ok(id) 54 | } 55 | 56 | fn get_memory(id: &str, db_path: &str) -> Result, Error> { 57 | let conn = Connection::open_with_flags(db_path, rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE)?; 58 | 59 | let mut stmt = conn.prepare("SELECT id, content FROM memories WHERE id = ?")?; 60 | let mut rows = stmt.query(params![id])?; 61 | 62 | if let Some(row) = rows.next()? { 63 | Ok(Some(Memory { 64 | id: row.get(0)?, 65 | content: row.get(1)?, 66 | })) 67 | } else { 68 | Ok(None) 69 | } 70 | } 71 | 72 | pub(crate) fn call(input: CallToolRequest) -> Result { 73 | let db_path = get_db_path()?; 74 | DB_INIT.call_once(|| { 75 | init_db(&db_path).expect("Failed to initialize database"); 76 | }); 77 | 78 | match input.params.name.as_str() { 79 | "store_memory" => { 80 | let args = input.params.arguments.unwrap_or_default(); 81 | let content = match args.get("content") { 82 | Some(v) if v.is_string() => v.as_str().unwrap(), 83 | _ => return Err(Error::msg("content parameter is required")), 84 | }; 85 | 86 | let id = store_memory(content, &db_path)?; 87 | 88 | Ok(CallToolResult { 89 | is_error: None, 90 | content: vec![Content { 91 | annotations: None, 92 | text: Some(json!({ "id": id }).to_string()), 93 | mime_type: Some("application/json".to_string()), 94 | r#type: ContentType::Text, 95 | data: None, 96 | }], 97 | }) 98 | } 99 | "get_memory" => { 100 | let args = input.params.arguments.unwrap_or_default(); 101 | let id = match args.get("id") { 102 | Some(v) if v.is_string() => v.as_str().unwrap(), 103 | _ => return Err(Error::msg("id parameter is required")), 104 | }; 105 | 106 | match get_memory(id, &db_path)? 
{ 107 | Some(memory) => Ok(CallToolResult { 108 | is_error: None, 109 | content: vec![Content { 110 | annotations: None, 111 | text: Some(serde_json::to_string(&memory)?), 112 | mime_type: Some("application/json".to_string()), 113 | r#type: ContentType::Text, 114 | data: None, 115 | }], 116 | }), 117 | None => Ok(CallToolResult { 118 | is_error: Some(true), 119 | content: vec![Content { 120 | annotations: None, 121 | text: Some("Memory not found".to_string()), 122 | mime_type: None, 123 | r#type: ContentType::Text, 124 | data: None, 125 | }], 126 | }), 127 | } 128 | } 129 | _ => Ok(CallToolResult { 130 | is_error: Some(true), 131 | content: vec![Content { 132 | annotations: None, 133 | text: Some(format!("Unknown tool: {}", input.params.name)), 134 | mime_type: None, 135 | r#type: ContentType::Text, 136 | data: None, 137 | }], 138 | }), 139 | } 140 | } 141 | 142 | pub(crate) fn describe() -> Result { 143 | Ok(ListToolsResult { 144 | tools: vec![ 145 | ToolDescription { 146 | name: "store_memory".into(), 147 | description: "Store content in memory and return a unique ID".into(), 148 | input_schema: json!({ 149 | "type": "object", 150 | "properties": { 151 | "content": { 152 | "type": "string", 153 | "description": "The content to store", 154 | } 155 | }, 156 | "required": ["content"], 157 | }) 158 | .as_object() 159 | .unwrap() 160 | .clone(), 161 | }, 162 | ToolDescription { 163 | name: "get_memory".into(), 164 | description: "Retrieve content from memory by ID".into(), 165 | input_schema: json!({ 166 | "type": "object", 167 | "properties": { 168 | "id": { 169 | "type": "string", 170 | "description": "The ID of the content to retrieve", 171 | } 172 | }, 173 | "required": ["id"], 174 | }) 175 | .as_object() 176 | .unwrap() 177 | .clone(), 178 | }, 179 | ], 180 | }) 181 | } 182 | -------------------------------------------------------------------------------- /examples/plugins/myip/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /examples/plugins/myip/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "myip" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64 = "0.21" 15 | base64-serde = "0.8.0" 16 | -------------------------------------------------------------------------------- /examples/plugins/myip/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/myip/README.md: -------------------------------------------------------------------------------- 1 | # myip 2 | 3 | A example `hyper-mcp` plugin that tell you your IP address, using Cloudflare. 4 | 5 | This is an example of how to use HTTP with `hyper-mcp`. 
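Under the hood it fetches `https://1.1.1.1/cdn-cgi/trace`, which returns plain-text `key=value` lines, and extracts the `ip=` entry. An illustrative (truncated) response, with placeholder values:

```
h=1.1.1.1
ip=203.0.113.42
ts=1745000000.123
loc=US
```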
6 | 7 | To use this, you will need to update your config like this. Note the `allowed_host` in `runtime_config` because we're using Cloudflare for this. 8 | 9 | ```json 10 | { 11 | "plugins": [ 12 | { 13 | "name": "time", 14 | "path": "/home/anh/Code/hyper-mcp/wasm/time.wasm" 15 | }, 16 | { 17 | "name": "qr-code", 18 | "path": "oci://ghcr.io/tuananh/qrcode-plugin:latest" 19 | }, 20 | { 21 | "name": "hash", 22 | "path": "oci://ghcr.io/tuananh/hash-plugin:latest" 23 | }, 24 | { 25 | "name": "myip", 26 | "path": "oci://ghcr.io/tuananh/myip-plugin:latest", 27 | "runtime_config": { 28 | "allowed_hosts": ["1.1.1.1"] 29 | } 30 | } 31 | ] 32 | } 33 | ``` -------------------------------------------------------------------------------- /examples/plugins/myip/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use extism_pdk::*; 4 | use pdk::types::*; 5 | use serde_json::json; 6 | 7 | pub(crate) fn call(_input: CallToolRequest) -> Result { 8 | let request = HttpRequest::new("https://1.1.1.1/cdn-cgi/trace"); 9 | let response = http::request::>(&request, None) 10 | .map_err(|e| Error::msg(format!("Failed to make HTTP request: {}", e)))?; 11 | 12 | let text = String::from_utf8(response.body().to_vec()) 13 | .map_err(|e| Error::msg(format!("Failed to parse response as UTF-8: {}", e)))?; 14 | 15 | // Parse the response to extract IP address 16 | let ip = text 17 | .lines() 18 | .find(|line| line.starts_with("ip=")) 19 | .map(|line| line.trim_start_matches("ip=")) 20 | .ok_or_else(|| Error::msg("Could not find IP address in response"))?; 21 | 22 | Ok(CallToolResult { 23 | is_error: None, 24 | content: vec![Content { 25 | annotations: None, 26 | text: Some(ip.to_string()), 27 | mime_type: Some("text/plain".into()), 28 | r#type: ContentType::Text, 29 | data: None, 30 | }], 31 | }) 32 | } 33 | 34 | pub(crate) fn describe() -> Result { 35 | Ok(ListToolsResult { 36 | tools: vec![ToolDescription { 37 | name: "myip".into(), 38 | description: "Get the current IP address using Cloudflare's service".into(), 39 | input_schema: json!({ 40 | "type": "object", 41 | "properties": {}, 42 | "required": [], 43 | }) 44 | .as_object() 45 | .unwrap() 46 | .clone(), 47 | }], 48 | }) 49 | } 50 | -------------------------------------------------------------------------------- /examples/plugins/myip/src/pdk.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_snake_case)] 2 | #![allow(unused_macros)] 3 | use extism_pdk::*; 4 | 5 | #[allow(unused)] 6 | fn panic_if_key_missing() -> ! { 7 | panic!("missing key"); 8 | } 9 | 10 | pub(crate) mod internal { 11 | pub(crate) fn return_error(e: extism_pdk::Error) -> i32 { 12 | let err = format!("{:?}", e); 13 | let mem = extism_pdk::Memory::from_bytes(&err).unwrap(); 14 | unsafe { 15 | extism_pdk::extism::error_set(mem.offset()); 16 | } 17 | -1 18 | } 19 | } 20 | 21 | #[allow(unused)] 22 | macro_rules! try_input { 23 | () => {{ 24 | let x = extism_pdk::input(); 25 | match x { 26 | Ok(x) => x, 27 | Err(e) => return internal::return_error(e), 28 | } 29 | }}; 30 | } 31 | 32 | #[allow(unused)] 33 | macro_rules! 
try_input_json { 34 | () => {{ 35 | let x = extism_pdk::input(); 36 | match x { 37 | Ok(extism_pdk::Json(x)) => x, 38 | Err(e) => return internal::return_error(e), 39 | } 40 | }}; 41 | } 42 | 43 | use base64_serde::base64_serde_type; 44 | 45 | base64_serde_type!(Base64Standard, base64::engine::general_purpose::STANDARD); 46 | 47 | mod exports { 48 | use super::*; 49 | 50 | #[unsafe(no_mangle)] 51 | pub extern "C" fn call() -> i32 { 52 | let ret = 53 | crate::call(try_input_json!()).and_then(|x| extism_pdk::output(extism_pdk::Json(x))); 54 | 55 | match ret { 56 | Ok(()) => 0, 57 | Err(e) => internal::return_error(e), 58 | } 59 | } 60 | 61 | #[unsafe(no_mangle)] 62 | pub extern "C" fn describe() -> i32 { 63 | let ret = crate::describe().and_then(|x| extism_pdk::output(extism_pdk::Json(x))); 64 | 65 | match ret { 66 | Ok(()) => 0, 67 | Err(e) => internal::return_error(e), 68 | } 69 | } 70 | } 71 | 72 | pub mod types { 73 | use super::*; 74 | 75 | #[derive( 76 | Default, 77 | Debug, 78 | Clone, 79 | serde::Serialize, 80 | serde::Deserialize, 81 | extism_pdk::FromBytes, 82 | extism_pdk::ToBytes, 83 | )] 84 | #[encoding(Json)] 85 | pub struct BlobResourceContents { 86 | /// A base64-encoded string representing the binary data of the item. 87 | #[serde(rename = "blob")] 88 | pub blob: String, 89 | 90 | /// The MIME type of this resource, if known. 91 | #[serde(rename = "mimeType")] 92 | #[serde(skip_serializing_if = "Option::is_none")] 93 | #[serde(default)] 94 | pub mime_type: Option, 95 | 96 | /// The URI of this resource. 97 | #[serde(rename = "uri")] 98 | pub uri: String, 99 | } 100 | 101 | #[derive( 102 | Default, 103 | Debug, 104 | Clone, 105 | serde::Serialize, 106 | serde::Deserialize, 107 | extism_pdk::FromBytes, 108 | extism_pdk::ToBytes, 109 | )] 110 | #[encoding(Json)] 111 | pub struct CallToolRequest { 112 | #[serde(rename = "method")] 113 | #[serde(skip_serializing_if = "Option::is_none")] 114 | #[serde(default)] 115 | pub method: Option, 116 | 117 | #[serde(rename = "params")] 118 | pub params: types::Params, 119 | } 120 | 121 | #[derive( 122 | Default, 123 | Debug, 124 | Clone, 125 | serde::Serialize, 126 | serde::Deserialize, 127 | extism_pdk::FromBytes, 128 | extism_pdk::ToBytes, 129 | )] 130 | #[encoding(Json)] 131 | pub struct CallToolResult { 132 | #[serde(rename = "content")] 133 | pub content: Vec, 134 | 135 | /// Whether the tool call ended in an error. 136 | /// 137 | /// If not set, this is assumed to be false (the call was successful). 138 | #[serde(rename = "isError")] 139 | #[serde(skip_serializing_if = "Option::is_none")] 140 | #[serde(default)] 141 | pub is_error: Option, 142 | } 143 | 144 | #[derive( 145 | Default, 146 | Debug, 147 | Clone, 148 | serde::Serialize, 149 | serde::Deserialize, 150 | extism_pdk::FromBytes, 151 | extism_pdk::ToBytes, 152 | )] 153 | #[encoding(Json)] 154 | pub struct Content { 155 | #[serde(rename = "annotations")] 156 | #[serde(skip_serializing_if = "Option::is_none")] 157 | #[serde(default)] 158 | pub annotations: Option, 159 | 160 | /// The base64-encoded image data. 161 | #[serde(rename = "data")] 162 | #[serde(skip_serializing_if = "Option::is_none")] 163 | #[serde(default)] 164 | pub data: Option, 165 | 166 | /// The MIME type of the image. Different providers may support different image types. 167 | #[serde(rename = "mimeType")] 168 | #[serde(skip_serializing_if = "Option::is_none")] 169 | #[serde(default)] 170 | pub mime_type: Option, 171 | 172 | /// The text content of the message. 
173 | #[serde(rename = "text")] 174 | #[serde(skip_serializing_if = "Option::is_none")] 175 | #[serde(default)] 176 | pub text: Option, 177 | 178 | #[serde(rename = "type")] 179 | pub r#type: types::ContentType, 180 | } 181 | 182 | #[derive( 183 | Default, 184 | Debug, 185 | Clone, 186 | serde::Serialize, 187 | serde::Deserialize, 188 | extism_pdk::FromBytes, 189 | extism_pdk::ToBytes, 190 | )] 191 | #[encoding(Json)] 192 | pub enum ContentType { 193 | #[default] 194 | #[serde(rename = "text")] 195 | Text, 196 | #[serde(rename = "image")] 197 | Image, 198 | #[serde(rename = "resource")] 199 | Resource, 200 | } 201 | 202 | #[derive( 203 | Default, 204 | Debug, 205 | Clone, 206 | serde::Serialize, 207 | serde::Deserialize, 208 | extism_pdk::FromBytes, 209 | extism_pdk::ToBytes, 210 | )] 211 | #[encoding(Json)] 212 | pub struct ListToolsResult { 213 | /// The list of ToolDescription objects provided by this servlet. 214 | #[serde(rename = "tools")] 215 | pub tools: Vec, 216 | } 217 | 218 | #[derive( 219 | Default, 220 | Debug, 221 | Clone, 222 | serde::Serialize, 223 | serde::Deserialize, 224 | extism_pdk::FromBytes, 225 | extism_pdk::ToBytes, 226 | )] 227 | #[encoding(Json)] 228 | pub struct Params { 229 | #[serde(rename = "arguments")] 230 | #[serde(skip_serializing_if = "Option::is_none")] 231 | #[serde(default)] 232 | pub arguments: Option>, 233 | 234 | #[serde(rename = "name")] 235 | pub name: String, 236 | } 237 | 238 | #[derive( 239 | Default, 240 | Debug, 241 | Clone, 242 | serde::Serialize, 243 | serde::Deserialize, 244 | extism_pdk::FromBytes, 245 | extism_pdk::ToBytes, 246 | )] 247 | #[encoding(Json)] 248 | pub enum Role { 249 | #[default] 250 | #[serde(rename = "assistant")] 251 | Assistant, 252 | #[serde(rename = "user")] 253 | User, 254 | } 255 | 256 | #[derive( 257 | Default, 258 | Debug, 259 | Clone, 260 | serde::Serialize, 261 | serde::Deserialize, 262 | extism_pdk::FromBytes, 263 | extism_pdk::ToBytes, 264 | )] 265 | #[encoding(Json)] 266 | pub struct TextAnnotation { 267 | /// Describes who the intended customer of this object or data is. 268 | /// 269 | /// It can include multiple entries to indicate content useful for multiple audiences (e.g., `["user", "assistant"]`). 270 | #[serde(rename = "audience")] 271 | pub audience: Vec, 272 | 273 | /// Describes how important this data is for operating the server. 274 | /// 275 | /// A value of 1 means "most important," and indicates that the data is 276 | /// effectively required, while 0 means "least important," and indicates that 277 | /// the data is entirely optional. 278 | #[serde(rename = "priority")] 279 | pub priority: f32, 280 | } 281 | 282 | #[derive( 283 | Default, 284 | Debug, 285 | Clone, 286 | serde::Serialize, 287 | serde::Deserialize, 288 | extism_pdk::FromBytes, 289 | extism_pdk::ToBytes, 290 | )] 291 | #[encoding(Json)] 292 | pub struct TextResourceContents { 293 | /// The MIME type of this resource, if known. 294 | #[serde(rename = "mimeType")] 295 | #[serde(skip_serializing_if = "Option::is_none")] 296 | #[serde(default)] 297 | pub mime_type: Option, 298 | 299 | /// The text of the item. This must only be set if the item can actually be represented as text (not binary data). 300 | #[serde(rename = "text")] 301 | pub text: String, 302 | 303 | /// The URI of this resource. 
304 | #[serde(rename = "uri")] 305 | pub uri: String, 306 | } 307 | 308 | #[derive( 309 | Default, 310 | Debug, 311 | Clone, 312 | serde::Serialize, 313 | serde::Deserialize, 314 | extism_pdk::FromBytes, 315 | extism_pdk::ToBytes, 316 | )] 317 | #[encoding(Json)] 318 | pub struct ToolDescription { 319 | /// A description of the tool 320 | #[serde(rename = "description")] 321 | pub description: String, 322 | 323 | /// The JSON schema describing the argument input 324 | #[serde(rename = "inputSchema")] 325 | pub input_schema: serde_json::Map, 326 | 327 | /// The name of the tool. It should match the plugin / binding name. 328 | #[serde(rename = "name")] 329 | pub name: String, 330 | } 331 | } 332 | 333 | mod raw_imports { 334 | use super::*; 335 | #[host_fn] 336 | extern "ExtismHost" {} 337 | } -------------------------------------------------------------------------------- /examples/plugins/qdrant/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | rustflags = ["--cfg", "tokio_unstable"] 4 | -------------------------------------------------------------------------------- /examples/plugins/qdrant/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/qdrant/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "qdrant" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0.219", features = ["derive"] } 13 | serde_json = "1.0.140" 14 | base64-serde = "0.8.0" 15 | base64 = "0.22.1" 16 | 17 | anyhow = "1.0.98" 18 | uuid = { version = "1.16.0", features = ["v4"] } 19 | -------------------------------------------------------------------------------- /examples/plugins/qdrant/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 
9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/qdrant/README.md: -------------------------------------------------------------------------------- 1 | # qdrant 2 | 3 | A plugin that provides vector similarity search capabilities using Qdrant vector database. 4 | 5 | ## What it does 6 | 7 | This plugin provides three main functionalities: 8 | 1. Create collections with configurable vector dimensions 9 | 2. Store documents with their vector embeddings in Qdrant 10 | 3. Search for similar documents using vector embeddings 11 | 12 | ## Configuration 13 | 14 | The plugin requires the following configuration: 15 | 16 | ```json 17 | { 18 | "plugins": [ 19 | { 20 | "name": "qdrant", 21 | "path": "oci://ghcr.io/tuananh/qdrant-plugin:latest", 22 | "runtime_config": { 23 | "QDRANT_URL": "http://localhost:6334", 24 | "allowed_hosts": [ 25 | "localhost:6333" 26 | ], 27 | "env_vars": { 28 | "QDRANT_URL": "http://localhost:6333" 29 | } 30 | } 31 | } 32 | ] 33 | } 34 | ``` 35 | 36 | ## Tools 37 | 38 | ### 1. qdrant_create_collection 39 | 40 | Creates a new collection in Qdrant with specified vector dimensions. 41 | 42 | ```json 43 | { 44 | "collection_name": "my_documents", 45 | "vector_size": 384 // Optional, defaults to 384 46 | } 47 | ``` 48 | 49 | ### 2. qdrant_store 50 | 51 | Stores a document with its vector embedding in Qdrant. 52 | 53 | ```json 54 | { 55 | "collection_name": "my_documents", 56 | "text": "Your document text", 57 | "vector": [0.1, 0.2, ...] // Vector dimensions must match collection's vector_size 58 | } 59 | ``` 60 | 61 | ### 3. qdrant_find 62 | 63 | Finds similar documents using vector similarity search. 
64 | 65 | ```json 66 | { 67 | "collection_name": "my_documents", 68 | "vector": [0.1, 0.2, ...], // Vector dimensions must match collection's vector_size 69 | "limit": 5 // Optional, defaults to 5 70 | } 71 | ``` 72 | 73 | ## Features 74 | 75 | - Configurable vector dimensions per collection 76 | - Support for vector-based queries 77 | - Configurable similarity search results limit 78 | - Uses cosine similarity for vector matching 79 | - Thread-safe operations 80 | 81 | ## Dependencies 82 | 83 | - Qdrant for vector storage and similarity search 84 | - UUID for document identification -------------------------------------------------------------------------------- /examples/plugins/qr-code/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /examples/plugins/qr-code/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "mcp-qr-code" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "qrcode" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64-serde = "0.7" 15 | base64 = "0.21" 16 | qrcode-png = "0.4.1" 17 | -------------------------------------------------------------------------------- /examples/plugins/qr-code/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 
9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/qrcode.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/qr-code/README.md: -------------------------------------------------------------------------------- 1 | qr-code 2 | ======= 3 | 4 | Source: [mcp.run-servlets](https://github.com/dylibso/mcp.run-servlets/tree/main/servlets/qr-code) 5 | -------------------------------------------------------------------------------- /examples/plugins/qr-code/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use base64::Engine; 4 | use extism_pdk::*; 5 | use pdk::types::*; 6 | use qrcode_png::{Color, QrCode, QrCodeEcc}; 7 | use serde_json::{json, Map, Value}; 8 | 9 | pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> { 10 | extism_pdk::log!( 11 | LogLevel::Info, 12 | "called with args: {:?}", 13 | input.params.arguments 14 | ); 15 | let args = input.params.arguments.unwrap_or_default(); 16 | let ecc = to_ecc( 17 | args.get("ecc") 18 | .cloned() 19 | .unwrap_or_else(|| json!(4)) 20 | .as_u64() 21 | .unwrap_or(4) 22 | as u8, 23 | ); 24 | 25 | let data = match args.get("data") { 26 | Some(v) => v.as_str().unwrap(), 27 | None => return Err(Error::msg("`data` must be available")), 28 | }; 29 | 30 | let mut code = QrCode::new(data, ecc)?; 31 | code.margin(10); 32 | code.zoom(10); 33 | 34 | let b = code.generate(Color::Grayscale(0, 255))?; 35 | let data = base64::engine::general_purpose::STANDARD.encode(b); 36 | 37 | Ok(CallToolResult { 38 | is_error: None, 39 | content: vec![Content { 40 | annotations: None, 41 | text: None, 42 | mime_type: Some("image/png".into()), 43 | r#type: ContentType::Image, 44 | data: Some(data), 45 | }], 46 | }) 47 | } 48 | 49 | fn to_ecc(num: u8) -> QrCodeEcc { 50 | if num < 4 { 51 | return unsafe { std::mem::transmute::<u8, QrCodeEcc>(num) }; 52 | } 53 | 54 | QrCodeEcc::High 55 | } 56 | 57 | pub(crate) fn describe() -> Result<ListToolsResult, Error> { 58 | Ok(ListToolsResult { 59 | tools: vec![ToolDescription { 60 | name: "qr-code".into(), 61 | description: "Convert data like a message or URL to a QR code (resulting in a PNG file)".into(), 62 | input_schema: json!({ 63 | "type": "object", 64 | "properties": { 65 | "data": { 66 | "type": "string", 67 | "description": "data to convert to a QR code PNG" 68 | }, 69 | "ecc": { 70 | "type": "number", 71 | "description": "Error correction level (range from 1 [low] to 4 [high], default to 4 unless user specifies)" 72 | } 73 | }, 74 | "required": ["data"] 75 | }).as_object().unwrap().clone(), 76 | }], 77 | }) 78 | } 79 | -------------------------------------------------------------------------------- /examples/plugins/sqlite/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/sqlite/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated
by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/sqlite/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sqlite" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | serde = { version = "1.0", features = ["derive"] } 13 | serde_json = "1.0" 14 | base64 = "0.21" 15 | base64-serde = "0.8.0" 16 | rusqlite = { version = "0.34.0", features = ["bundled"] } 17 | 18 | [build-dependencies] 19 | cc = "1.0" 20 | -------------------------------------------------------------------------------- /examples/plugins/sqlite/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | # Install wasi-sdk 8 | ENV WASI_OS=linux \ 9 | WASI_VERSION=25 \ 10 | WASI_VERSION_FULL=25.0 11 | 12 | # Detect architecture and set WASI_ARCH accordingly 13 | RUN apt-get update && apt-get install -y wget && \ 14 | ARCH=$(uname -m) && \ 15 | if [ "$ARCH" = "x86_64" ]; then \ 16 | export WASI_ARCH=x86_64; \ 17 | elif [ "$ARCH" = "aarch64" ]; then \ 18 | export WASI_ARCH=arm64; \ 19 | else \ 20 | echo "Unsupported architecture: $ARCH" && exit 1; \ 21 | fi && \ 22 | cd /opt && \ 23 | wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_VERSION}/wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 24 | tar xvf wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 25 | rm wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS}.tar.gz && \ 26 | mv wasi-sdk-${WASI_VERSION_FULL}-${WASI_ARCH}-${WASI_OS} wasi-sdk 27 | 28 | WORKDIR /workspace 29 | COPY . . 30 | RUN cargo fetch 31 | ENV WASI_SDK_PATH=/opt/wasi-sdk 32 | ENV CC_wasm32_wasip1="${WASI_SDK_PATH}/bin/clang --sysroot=${WASI_SDK_PATH}/share/wasi-sysroot" 33 | 34 | RUN cargo auditable build --release --target wasm32-wasip1 35 | 36 | FROM scratch 37 | WORKDIR / 38 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 39 | -------------------------------------------------------------------------------- /examples/plugins/sqlite/README.md: -------------------------------------------------------------------------------- 1 | # sqlite 2 | 3 | A plugin that provides SQLite interactions for `hyper-mcp`.
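As the usage example below shows, the database location is passed to the plugin through the `db_path` entry in `env_vars`, and the directory that holds it also has to be exposed via `allowed_paths`.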
4 | 5 | ## Usage 6 | 7 | Call with: 8 | ```json 9 | { 10 | "plugins": [ 11 | // {}, 12 | { 13 | "name": "sqlite", 14 | "path": "oci://ghcr.io/tuananh/sqlite-plugin", 15 | "runtime_config": { 16 | "allowed_paths": ["/tmp"], 17 | "env_vars": { 18 | "db_path": "/tmp/memory.db" 19 | } 20 | } 21 | } 22 | ] 23 | } 24 | 25 | ``` 26 | 27 | ## How to build 28 | 29 | This plugin requires you to have [wasi-sdk](https://github.com/WebAssembly/wasi-sdk) installed. 30 | 31 | ```sh 32 | export WASI_SDK_PATH=`` # in my case, it's /opt/wasi-sdk 33 | export CC_wasm32_wasip1="${WASI_SDK_PATH}/bin/clang --sysroot=${WASI_SDK_PATH}/share/wasi-sysroot" 34 | cargo build --release --target wasm32-wasip1 35 | ``` 36 | 37 | See [Dockerfile](./Dockerfile) for reference. 38 | -------------------------------------------------------------------------------- /examples/plugins/think/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/think/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/think/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "think" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "plugin" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "=1.4.0" 12 | serde = { version = "1.0.219", features = ["derive"] } 13 | serde_json = "1.0.140" 14 | base64-serde = "0.8.0" 15 | base64 = "0.22.1" 16 | -------------------------------------------------------------------------------- /examples/plugins/think/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/plugin.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/think/README.md: -------------------------------------------------------------------------------- 1 | # think 2 | 3 | A simple MCP plugin that returns the provided thought string. 
Useful for agentic reasoning, cache memory, or when you want to "think out loud" in a workflow. 4 | 5 | ## What it does 6 | 7 | Takes a `thought` parameter (string) and simply returns it as the result. No side effects, no logging, no database or network calls. 8 | 9 | Read more about the think tool in [this blog post](https://www.anthropic.com/engineering/claude-think-tool). 10 | 11 | ## Usage 12 | 13 | Call with: 14 | ```json 15 | { 16 | "plugins": [ 17 | { 18 | "name": "think", 19 | "path": "oci://ghcr.io/tuananh/think-plugin:latest" 20 | } 21 | ] 22 | } 23 | ``` 24 | 25 | ### Example 26 | 27 | Tool call: 28 | ```json 29 | { 30 | "name": "think", 31 | "arguments": { "thought": "I should try a different approach." } 32 | } 33 | ``` 34 | Returns: 35 | ```json 36 | "I should try a different approach." 37 | ``` 38 | 39 | ## Example usage with Cursor/Windsurf 40 | 41 | Add a new Cursor/Windsurf rule like the following 42 | 43 | ``` 44 | After any context change (viewing new files, running commands, or receiving tool outputs), use the "think" tool to organize your reasoning before responding. 45 | 46 | Specifically, always use the think tool when: 47 | - After examining file contents or project structure 48 | - After running terminal commands or analyzing their outputs 49 | - After receiving search results or API responses 50 | - Before making code suggestions or explaining complex concepts 51 | - When transitioning between different parts of a task 52 | 53 | When using the think tool: 54 | - List the specific rules or constraints that apply to the current task 55 | - Check if all required information is collected 56 | - Verify that your planned approach is correct 57 | - Break down complex problems into clearly defined steps 58 | - Analyze outputs from other tools thoroughly 59 | - Plan multi-step approaches before executing them 60 | 61 | The think tool has been proven to improve performance by up to 54% on complex tasks, especially when working with multiple tools or following detailed policies. 
62 | ``` -------------------------------------------------------------------------------- /examples/plugins/think/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use extism_pdk::*; 4 | use serde_json::Value; 5 | use pdk::types::{ 6 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription, 7 | }; 8 | use serde_json::json; 9 | 10 | 11 | 12 | pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> { 13 | match input.params.name.as_str() { 14 | "think" => think(input), 15 | _ => Ok(CallToolResult { 16 | is_error: Some(true), 17 | content: vec![Content { 18 | annotations: None, 19 | text: Some(format!("Unknown tool: {}", input.params.name)), 20 | mime_type: None, 21 | r#type: ContentType::Text, 22 | data: None, 23 | }], 24 | }), 25 | } 26 | } 27 | 28 | fn think(input: CallToolRequest) -> Result<CallToolResult, Error> { 29 | let args = input.params.arguments.unwrap_or_default(); 30 | if let Some(Value::String(thought)) = args.get("thought") { 31 | Ok(CallToolResult { 32 | is_error: None, 33 | content: vec![Content { 34 | annotations: None, 35 | text: Some(thought.clone()), 36 | mime_type: Some("text/plain".to_string()), 37 | r#type: ContentType::Text, 38 | data: None, 39 | }], 40 | }) 41 | } else { 42 | Ok(CallToolResult { 43 | is_error: Some(true), 44 | content: vec![Content { 45 | annotations: None, 46 | text: Some("Please provide a 'thought' string.".into()), 47 | mime_type: None, 48 | r#type: ContentType::Text, 49 | data: None, 50 | }], 51 | }) 52 | } 53 | } 54 | 55 | pub(crate) fn describe() -> Result<ListToolsResult, Error> { 56 | Ok(ListToolsResult{ 57 | tools: vec![ 58 | ToolDescription { 59 | name: "think".into(), 60 | description: "Use the tool to think about something. It will not obtain new information or change the database, but just append the thought to the log. Use it when complex reasoning or some cache memory is needed.".into(), 61 | input_schema: json!({ 62 | "type": "object", 63 | "properties": { 64 | "thought": { 65 | "type": "string", 66 | "description": "A thought to think about.", 67 | }, 68 | }, 69 | "required": ["thought"], 70 | }) 71 | .as_object() 72 | .unwrap() 73 | .clone(), 74 | }, 75 | ], 76 | }) 77 | } 78 | -------------------------------------------------------------------------------- /examples/plugins/time/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-wasip1" 3 | -------------------------------------------------------------------------------- /examples/plugins/time/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
21 | #.idea/ 22 | -------------------------------------------------------------------------------- /examples/plugins/time/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "time" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | name = "time" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | extism-pdk = "1.4.0" 12 | chrono = { version = "0.4", features = ["serde"] } 13 | serde = { version = "1.0", features = ["derive"] } 14 | serde_json = "1.0" 15 | base64-serde = "0.7" 16 | base64 = "0.21" -------------------------------------------------------------------------------- /examples/plugins/time/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.86-slim AS builder 2 | 3 | RUN rustup target add wasm32-wasip1 && \ 4 | rustup component add rust-std --target wasm32-wasip1 && \ 5 | cargo install cargo-auditable 6 | 7 | WORKDIR /workspace 8 | COPY . . 9 | RUN cargo fetch 10 | RUN cargo auditable build --release --target wasm32-wasip1 11 | 12 | FROM scratch 13 | WORKDIR / 14 | COPY --from=builder /workspace/target/wasm32-wasip1/release/time.wasm /plugin.wasm 15 | -------------------------------------------------------------------------------- /examples/plugins/time/README.md: -------------------------------------------------------------------------------- 1 | # time 2 | 3 | src: https://github.com/dylibso/mcp.run-servlets/tree/main/servlets/time -------------------------------------------------------------------------------- /examples/plugins/time/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod pdk; 2 | 3 | use extism_pdk::*; 4 | use pdk::types::{CallToolResult, Content, ContentType, ToolDescription}; 5 | use pdk::*; 6 | use serde_json::json; 7 | use std::error::Error as StdError; 8 | 9 | use chrono::Utc; 10 | 11 | #[derive(Debug)] 12 | struct CustomError(String); 13 | 14 | impl std::fmt::Display for CustomError { 15 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 16 | write!(f, "{}", self.0) 17 | } 18 | } 19 | 20 | impl StdError for CustomError {} 21 | 22 | // Called when the tool is invoked. 
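// The single `time` tool multiplexes three operations through the required `name`
// argument (`get_time_utc`, `parse_time`, `time_offset`); any other value falls
// through to the `CustomError` branch below.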
23 | pub(crate) fn call(input: types::CallToolRequest) -> Result<CallToolResult, Error> { 24 | let args = input.params.arguments.unwrap_or_default(); 25 | let name = args.get("name").unwrap().as_str().unwrap(); 26 | match name { 27 | "get_time_utc" => { 28 | let now = Utc::now(); 29 | let timestamp = now.timestamp().to_string(); 30 | let rfc2822 = now.to_rfc2822().to_string(); 31 | Ok(CallToolResult { 32 | content: vec![Content { 33 | text: Some(json!({ 34 | "utc_time": timestamp, 35 | "utc_time_rfc2822": rfc2822, 36 | }).to_string()), 37 | r#type: ContentType::Text, 38 | ..Default::default() 39 | }], 40 | is_error: Some(false), 41 | }) 42 | } 43 | "parse_time" => { 44 | let time = args.get("time_rfc2822").unwrap().as_str().unwrap(); 45 | let t = chrono::DateTime::parse_from_rfc2822(time).unwrap(); 46 | let timestamp = t.timestamp().to_string(); 47 | let rfc2822 = t.to_rfc2822().to_string(); 48 | Ok(CallToolResult { 49 | content: vec![Content { 50 | text: Some(json!({ 51 | "utc_time": timestamp, 52 | "utc_time_rfc2822": rfc2822, 53 | }).to_string()), 54 | r#type: ContentType::Text, 55 | ..Default::default() 56 | }], 57 | is_error: Some(false), 58 | }) 59 | } 60 | "time_offset" => { 61 | let t1 = args.get("timestamp").unwrap().as_i64().unwrap(); 62 | let offset = args.get("offset").unwrap().as_i64().unwrap(); 63 | let t1 = chrono::DateTime::from_timestamp(t1, 0).unwrap(); 64 | let t2 = t1 + chrono::Duration::seconds(offset); 65 | let timestamp = t2.timestamp().to_string(); 66 | let rfc2822 = t2.to_rfc2822().to_string(); 67 | Ok(CallToolResult { 68 | content: vec![Content { 69 | text: Some(json!({ 70 | "utc_time": timestamp, 71 | "utc_time_rfc2822": rfc2822, 72 | }).to_string()), 73 | r#type: ContentType::Text, 74 | ..Default::default() 75 | }], 76 | is_error: Some(false), 77 | }) 78 | } 79 | _ => Err(Error::new(CustomError("unknown command".to_string()))), 80 | } 81 | } 82 | 83 | pub(crate) fn describe() -> Result<types::ListToolsResult, Error> { 84 | Ok(types::ListToolsResult { tools: vec![ToolDescription { 85 | name: "time".into(), 86 | description: "Time operations plugin. It provides the following operations: 87 | 88 | - `get_time_utc`: Returns the current time in the UTC timezone. Takes no parameters. 89 | - `parse_time`: Takes a `time_rfc2822` string in RFC2822 format and returns the timestamp in UTC timezone. 90 | - `time_offset`: Takes integer `timestamp` and `offset` parameters. Adds a time offset to a given timestamp and returns the new timestamp in UTC timezone. 91 | 92 | Always use this tool to compute time operations, especially when it is necessary 93 | to compute time differences or offsets.".into(), 94 | input_schema: json!({ 95 | "type": "object", 96 | "required": ["name"], 97 | "properties": { 98 | "name": { 99 | "type": "string", 100 | "description": "The name of the operation to perform. ", 101 | "enum": ["get_time_utc", "time_offset", "parse_time"], 102 | }, 103 | "timestamp": { 104 | "type": "integer", 105 | "description": "The timestamp used for `time_offset`.", 106 | }, 107 | "offset" : { 108 | "type": "integer", 109 | "description": "The offset to add to the time in seconds.
", 110 | }, 111 | "time_rfc2822": { 112 | "type": "string", 113 | "description": "The time in RFC2822 format used in `parse_time`", 114 | }, 115 | }, 116 | }) 117 | .as_object() 118 | .unwrap() 119 | .clone(), 120 | }]}) 121 | } 122 | -------------------------------------------------------------------------------- /examples/plugins/time/time.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tuananh/hyper-mcp/426027e39cb6f336d0611e9679757f78d1695953/examples/plugins/time/time.wasm -------------------------------------------------------------------------------- /iac/.terraform.lock.hcl: -------------------------------------------------------------------------------- 1 | # This file is maintained automatically by "terraform init". 2 | # Manual edits may be lost in future updates. 3 | 4 | provider "registry.terraform.io/hashicorp/google" { 5 | version = "4.85.0" 6 | constraints = ">= 3.53.0, < 5.0.0" 7 | hashes = [ 8 | "h1:aSRZcEKF2wOi/v24IA+k9J2Y7aKVV1cHi/R0V3EhxXQ=", 9 | "zh:17d60a6a6c1741cf1e09ac6731433a30950285eac88236e623ab4cbf23832ca3", 10 | "zh:1c70254c016439dbb75cab646b4beace6ceeff117c75d81f2cc27d41c312f752", 11 | "zh:35e2aa2cc7ac84ce55e05bb4de7b461b169d3582e56d3262e249ff09d64fe008", 12 | "zh:417afb08d7b2744429f6b76806f4134d62b0354acf98e8a6c00de3c24f2bb6ad", 13 | "zh:622165d09d21d9a922c86f1fc7177a400507f2a8c4a4513114407ae04da2dd29", 14 | "zh:7cdb8e39a8ea0939558d87d2cb6caceded9e21f21003d9e9f9ce648d5db0bc3a", 15 | "zh:851e737dc551d6004a860a8907fda65118fc2c7ede9fa828f7be704a2a39e68f", 16 | "zh:a331ad289a02a2c4473572a573dc389be0a604cdd9e03dd8dbc10297fb14f14d", 17 | "zh:b67fd531251380decd8dd1f849460d60f329f89df3d15f5815849a1dd001f430", 18 | "zh:be8785957acca4f97aa3e800b313b57d1fca07788761c8867c9bc701fbe0bdb5", 19 | "zh:cb6579a259fe020e1f88217d8f6937b2d5ace15b6406370977a1966eb31b1ca5", 20 | "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", 21 | ] 22 | } 23 | 24 | provider "registry.terraform.io/hashicorp/google-beta" { 25 | version = "4.85.0" 26 | constraints = ">= 4.40.0, < 5.0.0" 27 | hashes = [ 28 | "h1:YkCDGkP0AUZoNobLoxRnM52Pi4alYE9EFXalEu8p8E8=", 29 | "zh:40e9c7ec46955b4d79065a14185043a4ad6af8d0246715853fc5c99208b66980", 30 | "zh:5950a9ba2f96420ea5335b543e315b1a47a705f9a9abfc53c6fec52d084eddcb", 31 | "zh:5dfa98d32246a5d97e018f2b91b0e921cc6f061bc8591884f3b144f0d62f1c20", 32 | "zh:628d0ca35c6d4c35077859bb0a5534c1de44f23a91e190f9c3f06f2358172e75", 33 | "zh:6e78d54fd4de4151968149b4c3521f563a8b5c55aad423dba5968a9114b65ae4", 34 | "zh:91c3bc443188638353285bd35b06d3a3b39b42b3b4cc0637599a430438fba2f7", 35 | "zh:9e91b03363ebf39eea5ec0fbe7675f6979883aa9ad9a36664357d8513a007cf3", 36 | "zh:db9a8d6bfe075fb38c260986ab557d40e8d18e5698c62956a6da8120fae01d59", 37 | "zh:e41169c49f3bb53217905509e2ba8bb4680c373e1f54db7fac1b7f72943a1004", 38 | "zh:f32f55a8af605afbc940814e17493ac83d9d66cd6da9bbc247e0a833a0aa37ec", 39 | "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", 40 | "zh:f6561a6badc3af842f9ad5bb926104954047f07cb90fadcca1357441cc67d91d", 41 | ] 42 | } 43 | 44 | provider "registry.terraform.io/hashicorp/random" { 45 | version = "3.7.1" 46 | constraints = ">= 2.1.0" 47 | hashes = [ 48 | "h1:/qtweZW2sk0kBNiQM02RvBXmlVdI9oYqRMCyBZ8XA98=", 49 | "zh:3193b89b43bf5805493e290374cdda5132578de6535f8009547c8b5d7a351585", 50 | "zh:3218320de4be943e5812ed3de995946056db86eb8d03aa3f074e0c7316599bef", 51 | "zh:419861805a37fa443e7d63b69fb3279926ccf98a79d256c422d5d82f0f387d1d", 52 | 
"zh:4df9bd9d839b8fc11a3b8098a604b9b46e2235eb65ef15f4432bde0e175f9ca6", 53 | "zh:5814be3f9c9cc39d2955d6f083bae793050d75c572e70ca11ccceb5517ced6b1", 54 | "zh:63c6548a06de1231c8ee5570e42ca09c4b3db336578ded39b938f2156f06dd2e", 55 | "zh:697e434c6bdee0502cc3deb098263b8dcd63948e8a96d61722811628dce2eba1", 56 | "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", 57 | "zh:a0b8e44927e6327852bbfdc9d408d802569367f1e22a95bcdd7181b1c3b07601", 58 | "zh:b7d3af018683ef22794eea9c218bc72d7c35a2b3ede9233b69653b3c782ee436", 59 | "zh:d63b911d618a6fe446c65bfc21e793a7663e934b2fef833d42d3ccd38dd8d68d", 60 | "zh:fa985cd0b11e6d651f47cff3055f0a9fd085ec190b6dbe99bf5448174434cdea", 61 | ] 62 | } 63 | -------------------------------------------------------------------------------- /iac/main.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_providers { 3 | google = { 4 | source = "hashicorp/google" 5 | } 6 | } 7 | } 8 | 9 | provider "google" { 10 | project = var.project_id 11 | } 12 | 13 | resource "google_service_account" "my-app" { 14 | account_id = "${var.name}-my-app" 15 | } 16 | 17 | # Create a secret for the config file 18 | resource "google_secret_manager_secret" "hyper-mcp-config" { 19 | secret_id = "${var.name}-config" 20 | 21 | replication { 22 | auto {} 23 | } 24 | } 25 | 26 | # Add the config file content to the secret 27 | resource "google_secret_manager_secret_version" "hyper-mcp-config-version" { 28 | secret = google_secret_manager_secret.hyper-mcp-config.id 29 | secret_data = <, 9 | } 10 | 11 | #[derive(Debug, Serialize, Deserialize, Clone)] 12 | pub struct PluginConfig { 13 | pub name: String, 14 | pub path: String, 15 | pub runtime_config: Option, 16 | } 17 | 18 | #[derive(Debug, Serialize, Deserialize, Clone, Default)] 19 | pub struct RuntimeConfig { 20 | // List of tool names to skip loading at runtime. 21 | pub skip_tools: Option>, 22 | pub allowed_hosts: Option>, 23 | pub allowed_paths: Option>, 24 | pub env_vars: Option>, 25 | pub memory_limit: Option, 26 | } 27 | 28 | pub async fn load_config(path: &Path) -> Result { 29 | if !path.exists() { 30 | return Err(anyhow::anyhow!( 31 | "Config file not found at: {}. 
Please create a config file first.", 32 | path.display() 33 | )); 34 | } 35 | let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); 36 | 37 | let content = tokio::fs::read_to_string(path) 38 | .await 39 | .with_context(|| format!("Failed to read config file at {}", path.display()))?; 40 | 41 | let config = match ext { 42 | "json" => serde_json::from_str(&content)?, 43 | "yaml" | "yml" => serde_yaml::from_str(&content)?, 44 | "toml" => toml::from_str(&content)?, 45 | _ => return Err(anyhow::anyhow!("Unsupported config format: {}", ext)), 46 | }; 47 | 48 | Ok(config) 49 | } 50 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::Parser; 3 | use rmcp::transport::sse_server::SseServer; 4 | use rmcp::transport::streamable_http_server::{ 5 | StreamableHttpService, session::local::LocalSessionManager, 6 | }; 7 | use rmcp::{ServiceExt, transport::stdio}; 8 | use std::path::PathBuf; 9 | use tracing_subscriber::{self, EnvFilter}; 10 | 11 | mod config; 12 | mod oci; 13 | mod plugins; 14 | 15 | pub const DEFAULT_BIND_ADDRESS: &str = "127.0.0.1:3001"; 16 | 17 | #[derive(Parser, Clone)] 18 | #[command(author = "Tuan Anh Tran ", version = env!("CARGO_PKG_VERSION"), about, long_about = None)] 19 | struct Cli { 20 | #[arg(short, long, value_name = "FILE")] 21 | config_file: Option<PathBuf>, 22 | 23 | #[arg( 24 | long = "log-level", 25 | value_name = "LEVEL", 26 | env = "HYPER_MCP_LOG_LEVEL", 27 | default_value = "info" 28 | )] 29 | log_level: Option<String>, 30 | 31 | #[arg( 32 | long = "transport", 33 | value_name = "TRANSPORT", 34 | env = "HYPER_MCP_TRANSPORT", 35 | default_value = "stdio", 36 | value_parser = ["stdio", "sse", "streamable-http"] 37 | )] 38 | transport: String, 39 | 40 | #[arg( 41 | long = "bind-address", 42 | value_name = "ADDRESS", 43 | env = "HYPER_MCP_BIND_ADDRESS", 44 | default_value = DEFAULT_BIND_ADDRESS 45 | )] 46 | bind_address: String, 47 | 48 | #[arg( 49 | long = "insecure-skip-signature", 50 | help = "Skip OCI image signature verification", 51 | env = "HYPER_MCP_INSECURE_SKIP_SIGNATURE", 52 | default_value = "false" 53 | )] 54 | insecure_skip_signature: bool, 55 | 56 | #[arg( 57 | long = "use-sigstore-tuf-data", 58 | help = "Use Sigstore TUF data for verification", 59 | env = "HYPER_MCP_USE_SIGSTORE_TUF_DATA", 60 | default_value = "true" 61 | )] 62 | use_sigstore_tuf_data: bool, 63 | 64 | #[arg( 65 | long = "rekor-pub-keys", 66 | help = "Path to Rekor public keys for verification", 67 | env = "HYPER_MCP_REKOR_PUB_KEYS" 68 | )] 69 | rekor_pub_keys: Option<PathBuf>, 70 | 71 | #[arg( 72 | long = "fulcio-certs", 73 | help = "Path to Fulcio certificates for verification", 74 | env = "HYPER_MCP_FULCIO_CERTS" 75 | )] 76 | fulcio_certs: Option<PathBuf>, 77 | 78 | #[arg( 79 | long = "cert-issuer", 80 | help = "Certificate issuer to verify against", 81 | env = "HYPER_MCP_CERT_ISSUER" 82 | )] 83 | cert_issuer: Option<String>, 84 | 85 | #[arg( 86 | long = "cert-email", 87 | help = "Certificate email to verify against", 88 | env = "HYPER_MCP_CERT_EMAIL" 89 | )] 90 | cert_email: Option<String>, 91 | 92 | #[arg( 93 | long = "cert-url", 94 | help = "Certificate URL to verify against", 95 | env = "HYPER_MCP_CERT_URL" 96 | )] 97 | cert_url: Option<String>, 98 | } 99 | 100 | #[tokio::main] 101 | async fn main() -> Result<()> { 102 | let cli = Cli::parse(); 103 | 104 | let log_level = cli.log_level.clone().unwrap_or_else(|| "info".to_string()); 105 | tracing_subscriber::fmt() 106 |
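// Logs are written to stderr (see below) so stdout stays free for the stdio MCP transport.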
.with_env_filter(EnvFilter::from_default_env().add_directive(log_level.parse().unwrap())) 107 | .with_writer(std::io::stderr) 108 | .with_ansi(false) 109 | .init(); 110 | 111 | tracing::info!("Starting hyper-mcp server"); 112 | 113 | // Get default config path in the user's config directory 114 | let default_config_path = dirs::config_dir() 115 | .map(|mut path| { 116 | path.push("hyper-mcp"); 117 | path.push("config.json"); 118 | path 119 | }) 120 | .unwrap(); 121 | 122 | // Extract config_file before using cli elsewhere to avoid borrow issues 123 | let config_file = cli.config_file.clone(); 124 | let config_path = config_file.unwrap_or(default_config_path); 125 | tracing::info!("Using config file at {}", config_path.display()); 126 | 127 | let config = config::load_config(&config_path).await?; 128 | 129 | // Create plugin service with the config and CLI options 130 | let plugin_service = plugins::PluginService::new(config, &cli).await?; 131 | 132 | match cli.transport.as_str() { 133 | "stdio" => { 134 | let service = plugin_service.serve(stdio()).await.inspect_err(|e| { 135 | tracing::error!("Serving error: {:?}", e); 136 | })?; 137 | service.waiting().await?; 138 | } 139 | "sse" => { 140 | tracing::info!( 141 | "Starting hyper-mcp with SSE transport at {}", 142 | cli.bind_address 143 | ); 144 | let ct = SseServer::serve(cli.bind_address.parse()?) 145 | .await? 146 | .with_service(move || plugin_service.clone()); 147 | 148 | tokio::signal::ctrl_c().await?; 149 | ct.cancel(); 150 | } 151 | "streamable-http" => { 152 | tracing::info!( 153 | "Starting hyper-mcp with streamable-http transport at {}/mcp", 154 | cli.bind_address 155 | ); 156 | 157 | let service = StreamableHttpService::new( 158 | move || plugin_service.clone(), 159 | LocalSessionManager::default().into(), 160 | Default::default(), 161 | ); 162 | 163 | let router = axum::Router::new().nest_service("/mcp", service); 164 | 165 | let tcp_listener = tokio::net::TcpListener::bind(cli.bind_address).await?; 166 | let _ = axum::serve(tcp_listener, router) 167 | .with_graceful_shutdown(async { 168 | tokio::signal::ctrl_c().await.unwrap(); 169 | tracing::info!("Received Ctrl+C, shutting down hyper-mcp server..."); 170 | // Give the log a moment to flush 171 | tokio::time::sleep(std::time::Duration::from_millis(100)).await; 172 | std::process::exit(0); 173 | }) 174 | .await; 175 | } 176 | _ => unreachable!(), 177 | } 178 | 179 | Ok(()) 180 | } 181 | -------------------------------------------------------------------------------- /src/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod config; 2 | pub mod plugins; 3 | pub mod oci; 4 | pub mod tools; 5 | --------------------------------------------------------------------------------
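All of the example plugins above share the same two-function shape: `call` receives a `CallToolRequest` and answers with a `CallToolResult`, while `describe` advertises the available tools as a `ListToolsResult`. The sketch below is a minimal, hypothetical `lib.rs` for a new servlet written against the same generated `pdk.rs` types; the `echo` tool name and its `text` argument are illustrative and not part of this repository.

```rust
// Minimal servlet sketch (illustrative only): echoes the `text` argument back.
// Assumes a generated pdk.rs module exposing the same types used by the plugins above.
mod pdk;

use extism_pdk::*;
use pdk::types::{
    CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription,
};
use serde_json::json;

pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> {
    let args = input.params.arguments.unwrap_or_default();
    // Pull the `text` argument out of the JSON arguments map, defaulting to an empty string.
    let text = args
        .get("text")
        .and_then(|v| v.as_str())
        .unwrap_or_default()
        .to_string();

    Ok(CallToolResult {
        is_error: None,
        content: vec![Content {
            annotations: None,
            text: Some(text),
            mime_type: Some("text/plain".into()),
            r#type: ContentType::Text,
            data: None,
        }],
    })
}

pub(crate) fn describe() -> Result<ListToolsResult, Error> {
    Ok(ListToolsResult {
        tools: vec![ToolDescription {
            name: "echo".into(),
            description: "Echo the provided text back to the caller".into(),
            input_schema: json!({
                "type": "object",
                "properties": {
                    "text": { "type": "string", "description": "Text to echo back" }
                },
                "required": ["text"]
            })
            .as_object()
            .unwrap()
            .clone(),
        }],
    })
}
```

As with the other plugins, the accompanying `Cargo.toml` and `Dockerfile` would mirror the ones shown above (a `cdylib` crate built for the wasm32-wasip1 target).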