├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── actions │ ├── setup-codeclimate │ │ └── action.yml │ └── setup │ │ └── action.yml └── workflows │ ├── main.yml │ └── release.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .qlty └── qlty.toml ├── CODE_OF_CONDUCT.md ├── Dockerfile ├── LICENSE ├── README.md ├── bun.lockb ├── docker-compose.test.yml ├── eslint.config.mjs ├── langgraphjs-checkpoint-dynamodb.code-workspace ├── package.json ├── src ├── __mocks__ │ └── DynamoDBDocument.mock.ts ├── __tests__ │ ├── helpers │ │ └── expectErrorMessageToBeThrown.ts │ └── saver.test.ts ├── index.ts ├── saver.ts ├── types.ts └── write.ts ├── tests ├── __snapshots__ │ ├── .gitkeep │ └── integration.test.ts.snap └── integration.test.ts ├── tsconfig.cjs.json ├── tsconfig.esm.json └── tsconfig.json /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Desktop (please complete the following information):** 24 | - OS: [e.g. iOS] 25 | - Browser [e.g. chrome, safari] 26 | - Version [e.g. 22] 27 | 28 | **Additional context** 29 | Add any other context about the problem here. 
30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/actions/setup-codeclimate/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Setup CodeClimate' 2 | description: 'Downloads and sets up the CodeClimate Test Reporter' 3 | 4 | runs: 5 | using: 'composite' 6 | steps: 7 | - name: Download CodeClimate Test Reporter 8 | run: | 9 | curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter 10 | chmod +x ./cc-test-reporter 11 | shell: bash 12 | 13 | - name: Run CodeClimate Test Reporter before-build 14 | run: ./cc-test-reporter before-build 15 | shell: bash 16 | -------------------------------------------------------------------------------- /.github/actions/setup/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Setup Environment' 2 | description: 'Checks out code, sets up Node.js and Bun, caches dependencies, and installs them' 3 | inputs: 4 | node-version: 5 | description: 'Node.js version' 6 | 
required: true 7 | default: '20' 8 | bun-version: 9 | description: 'Bun version' 10 | required: true 11 | default: 'latest' 12 | runs: 13 | using: 'composite' 14 | steps: 15 | - name: Checkout code 16 | uses: actions/checkout@v3 17 | 18 | - name: Set up Node.js 19 | uses: actions/setup-node@v3 20 | with: 21 | node-version: ${{ inputs.node-version }} 22 | registry-url: "https://registry.npmjs.org" 23 | 24 | - name: Set up Bun 25 | uses: oven-sh/setup-bun@v1 26 | with: 27 | bun-version: ${{ inputs.bun-version }} 28 | 29 | - name: Cache Bun dependencies 30 | uses: actions/cache@v3 31 | with: 32 | path: ~/.bun/install/cache/v*/cache 33 | key: ${{ runner.os }}-bun-${{ hashFiles('bun.lockb') }} 34 | 35 | - name: Install dependencies 36 | run: bun install 37 | shell: bash 38 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Checks 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | paths-ignore: 11 | - '**/*.json' 12 | - '**/*.md' 13 | permissions: write-all # Needed for AI reviewer 14 | jobs: 15 | lint: 16 | runs-on: ubuntu-latest 17 | name: Lint Code 18 | steps: 19 | - name: Checkout code 20 | uses: actions/checkout@v3 21 | - name: Setup Environment 22 | uses: ./.github/actions/setup 23 | with: 24 | node-version: '20' 25 | bun-version: 'latest' 26 | 27 | - name: Run linter 28 | run: bun run lint 29 | 30 | format: 31 | runs-on: ubuntu-latest 32 | name: Check Code Formatting 33 | steps: 34 | - name: Checkout code 35 | uses: actions/checkout@v3 36 | - name: Setup Environment 37 | uses: ./.github/actions/setup 38 | with: 39 | node-version: '20' 40 | bun-version: 'latest' 41 | 42 | - name: Check code formatting 43 | run: bun run format:check 44 | 45 | build: 46 | runs-on: ubuntu-latest 47 | name: Build Project 48 | steps: 49 | - name: Checkout code 50 | uses: actions/checkout@v3 51 
| - name: Setup Environment 52 | uses: ./.github/actions/setup 53 | with: 54 | node-version: '20' 55 | bun-version: 'latest' 56 | 57 | - name: Build the project 58 | run: bun run build 59 | 60 | test: 61 | runs-on: ubuntu-latest 62 | name: Unit Test Project 63 | env: 64 | CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} 65 | steps: 66 | - name: Checkout code 67 | uses: actions/checkout@v3 68 | - name: Setup Environment 69 | uses: ./.github/actions/setup 70 | with: 71 | node-version: '20' 72 | bun-version: 'latest' 73 | 74 | - name: Setup CodeClimate 75 | uses: ./.github/actions/setup-codeclimate 76 | 77 | - name: Run tests 78 | run: bun run test -- --coverage --coverage-reporter=lcov 79 | continue-on-error: true 80 | 81 | - name: Upload coverage artifact 82 | uses: actions/upload-artifact@v4 83 | with: 84 | name: unit-test-coverage 85 | path: coverage/lcov.info 86 | 87 | review: 88 | runs-on: ubuntu-latest 89 | needs: [lint, format, build, test] 90 | if: github.event_name == 'pull_request' 91 | steps: 92 | - name: Checkout Repo 93 | uses: actions/checkout@v3 94 | 95 | - name: AI Code Reviewer 96 | uses: researchwiseai/ai-codereviewer@main 97 | with: 98 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 99 | OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY_V2 }} 100 | OPENAI_API_MODEL: 'o1-mini' 101 | exclude: '**/*.json, **/*.md' 102 | 103 | integration-test: 104 | runs-on: ubuntu-latest 105 | name: Integration Test Project 106 | needs: [lint, format, build, test] 107 | env: 108 | CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} 109 | services: 110 | dynamodb: 111 | image: amazon/dynamodb-local 112 | ports: 113 | - 8000:8000 114 | steps: 115 | - name: Checkout code 116 | uses: actions/checkout@v3 117 | 118 | - name: Setup Environment 119 | uses: ./.github/actions/setup 120 | with: 121 | node-version: '20' 122 | bun-version: 'latest' 123 | 124 | - name: Wait for DynamoDB service to be ready 125 | run: | 126 | for i in {1..10}; do 127 | if nc -z localhost 8000; then 
128 | echo "DynamoDB is up!" 129 | break 130 | fi 131 | echo "Waiting for DynamoDB..." 132 | sleep 2 133 | done 134 | 135 | - name: Setup CodeClimate 136 | uses: ./.github/actions/setup-codeclimate 137 | 138 | - name: Run integration tests 139 | env: 140 | AWS_ACCESS_KEY_ID: 'fakeMyKeyId' 141 | AWS_SECRET_ACCESS_KEY: 'fakeSecretAccessKey' 142 | AWS_REGION: 'local' 143 | AWS_DYNAMODB_ENDPOINT: 'http://localhost:8000' 144 | run: bun run test:integration -- --coverage --coverage-reporter=lcov 145 | continue-on-error: true 146 | 147 | - name: Upload coverage artifact 148 | uses: actions/upload-artifact@v4 149 | with: 150 | name: integration-test-coverage 151 | path: coverage/lcov.info 152 | 153 | coverage-report: 154 | runs-on: ubuntu-latest 155 | needs: [test, integration-test] 156 | env: 157 | CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} 158 | steps: 159 | - name: Checkout code 160 | uses: actions/checkout@v3 161 | 162 | - name: Download unit test coverage artifact 163 | uses: actions/download-artifact@v4 164 | with: 165 | name: unit-test-coverage 166 | path: ./coverage/unit 167 | 168 | - name: Download integration test coverage artifact 169 | uses: actions/download-artifact@v4 170 | with: 171 | name: integration-test-coverage 172 | path: ./coverage/integration 173 | 174 | - name: Setup CodeClimate 175 | uses: ./.github/actions/setup-codeclimate 176 | 177 | - name: Format unit test coverage 178 | run: | 179 | ./cc-test-reporter format-coverage -t lcov -o codeclimate.unit.json ./coverage/unit/lcov.info 180 | 181 | - name: Format integration test coverage 182 | run: | 183 | ./cc-test-reporter format-coverage -t lcov -o codeclimate.integration.json ./coverage/integration/lcov.info 184 | 185 | - name: Sum coverage reports 186 | run: | 187 | ./cc-test-reporter sum-coverage codeclimate.unit.json codeclimate.integration.json -o codeclimate.total.json 188 | 189 | - name: Upload coverage to CodeClimate 190 | run: | 191 | ./cc-test-reporter upload-coverage -i 
codeclimate.total.json 192 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*.*.*' 7 | 8 | permissions: 9 | contents: write 10 | id-token: write 11 | actions: read 12 | 13 | jobs: 14 | release: 15 | runs-on: ubuntu-latest 16 | outputs: 17 | digests: ${{ steps.hash.outputs.digests }} 18 | 19 | steps: 20 | - name: Checkout code 21 | uses: actions/checkout@v3 22 | 23 | - name: Setup Environment 24 | uses: ./.github/actions/setup 25 | with: 26 | node-version: '20' 27 | bun-version: 'latest' 28 | 29 | - name: Check version matches tag 30 | shell: bash 31 | run: | 32 | # Extract version from package.json 33 | PACKAGE_VERSION=$(node -p "require('./package.json').version") 34 | echo "Package version: $PACKAGE_VERSION" 35 | 36 | # Extract tag version from GITHUB_REF 37 | TAG=${GITHUB_REF#refs/tags/} 38 | echo "Git tag: $TAG" 39 | 40 | # Remove 'v' prefix 41 | TAG_VERSION=${TAG#v} 42 | 43 | echo "Tag version: $TAG_VERSION" 44 | 45 | # Compare versions 46 | if [ "$PACKAGE_VERSION" != "$TAG_VERSION" ]; then 47 | echo "::error::Package version ($PACKAGE_VERSION) does not match tag version ($TAG_VERSION)" 48 | exit 1 49 | fi 50 | 51 | echo "✅ Package version matches tag version." 52 | 53 | - name: Build the project 54 | run: bun run build 55 | 56 | - name: Generate subject for provenance 57 | id: hash 58 | run: | 59 | set -euo pipefail 60 | # List the artifacts the provenance will refer to. 61 | files=$(find dist -type f) 62 | # Generate the subjects (base64 encoded). 
63 | echo "digests=$(sha256sum $files | base64 -w0)" >> "${GITHUB_OUTPUT}" 64 | 65 | - name: Publish to NPM 66 | run: npm publish --access public 67 | env: 68 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 69 | 70 | - name: Generate changelog 71 | id: changelog 72 | run: | 73 | echo "changelog=$(git log --merges --pretty=format:'- %s by @%an' ${GITHUB_SHA}...${{ github.ref }})" >> $GITHUB_OUTPUT 74 | 75 | - name: Create GitHub Release 76 | id: create_release 77 | uses: actions/create-release@v1 78 | env: 79 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 80 | with: 81 | tag_name: ${{ github.ref }} 82 | release_name: Release ${{ github.ref }} 83 | body: | 84 | ## Changes 85 | ${{ steps.changelog.outputs.changelog }} 86 | 87 | provenance: 88 | needs: [release] 89 | permissions: 90 | actions: read 91 | id-token: write 92 | contents: write 93 | uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0 94 | with: 95 | base64-subjects: '${{ needs.release.outputs.digests }}' 96 | upload-assets: true 97 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | 
.lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 
-------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | * 2 | !src/**/*.ts -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 4, 4 | "semi": true, 5 | "singleQuote": true, 6 | "trailingComma": "es5", 7 | "arrowParens": "avoid", 8 | "endOfLine": "auto" 9 | } 10 | -------------------------------------------------------------------------------- /.qlty/qlty.toml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by `qlty init`. 2 | # You can modify it to suit your needs. 3 | # We recommend you to commit this file to your repository. 4 | # 5 | # This configuration is used by both Qlty CLI and Qlty Cloud. 6 | # 7 | # Qlty CLI -- Code quality toolkit for developers 8 | # Qlty Cloud -- Fully automated Code Health Platform 9 | # 10 | # Try Qlty Cloud: https://qlty.sh 11 | # 12 | # For a guide to configuration, visit https://qlty.sh/d/config 13 | # Or for a full reference, visit https://qlty.sh/d/qlty-toml 14 | config_version = "0" 15 | 16 | exclude_patterns = [ 17 | "*_min.*", 18 | "*-min.*", 19 | "*.min.*", 20 | "**/*.d.ts", 21 | "**/.yarn/**", 22 | "**/bower_components/**", 23 | "**/build/**", 24 | "**/cache/**", 25 | "**/config/**", 26 | "**/db/**", 27 | "**/deps/**", 28 | "**/dist/**", 29 | "**/extern/**", 30 | "**/external/**", 31 | "**/generated/**", 32 | "**/Godeps/**", 33 | "**/gradlew/**", 34 | "**/mvnw/**", 35 | "**/node_modules/**", 36 | "**/protos/**", 37 | "**/seed/**", 38 | "**/target/**", 39 | "**/testdata/**", 40 | "**/vendor/**", 41 | "**/assets/**", 42 | ] 43 | 44 | test_patterns = [ 45 | "**/test/**", 46 | "**/spec/**", 47 | "**/*.test.*", 48 | "**/*.spec.*", 49 | "**/*_test.*", 50 | 
"**/*_spec.*", 51 | "**/test_*.*", 52 | "**/spec_*.*", 53 | ] 54 | 55 | [smells] 56 | mode = "comment" 57 | 58 | [[source]] 59 | name = "default" 60 | default = true 61 | 62 | [[plugin]] 63 | name = "actionlint" 64 | 65 | [[plugin]] 66 | name = "checkov" 67 | 68 | [[plugin]] 69 | name = "eslint" 70 | version = "9.7.0" 71 | package_file = "package.json" 72 | package_filters = ["eslint", "prettier"] 73 | 74 | [[plugin]] 75 | name = "golangci-lint" 76 | 77 | [[plugin]] 78 | name = "hadolint" 79 | 80 | [[plugin]] 81 | name = "markdownlint" 82 | version = "0.41.0" 83 | 84 | [[plugin]] 85 | name = "prettier" 86 | package_file = "package.json" 87 | package_filters = ["prettier"] 88 | 89 | [[plugin]] 90 | name = "ripgrep" 91 | 92 | [[plugin]] 93 | name = "trivy" 94 | drivers = [ 95 | "config", 96 | "fs-vuln", 97 | ] 98 | 99 | [[plugin]] 100 | name = "trufflehog" 101 | 102 | [[plugin]] 103 | name = "yamllint" 104 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | support@researchwiseai.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 
129 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM oven/bun:1 2 | 3 | WORKDIR /usr/src/app 4 | 5 | COPY package.json bun.lockb ./ 6 | 7 | RUN bun install --frozen-lockfile 8 | 9 | COPY src ./src 10 | COPY tests ./tests 11 | 12 | CMD ["bun", "run", "test:integration"] 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 researchwiseai 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # langgraphjs-checkpoint-dynamodb 2 | 3 | Implementation of a LangGraph.js CheckpointSaver that uses AWS's DynamoDB 4 | 5 | ## Package name 6 | 7 | ```bash 8 | @rwai/langgraphjs-checkpoint-dynamodb 9 | ``` 10 | 11 | ## Inspiration 12 | 13 | Guidance and inspiration have been taken from the existing checkpoint savers 14 | (Sqlite and MongoDB) written by the Langgraph JS team. 15 | 16 | - [Sqlite](https://github.com/langchain-ai/langgraphjs/tree/main/libs/checkpoint-sqlite) 17 | - [MongoDB](https://github.com/langchain-ai/langgraphjs/tree/main/libs/checkpoint-mongodb) 18 | 19 | ## Required DynamoDB Tables 20 | 21 | To be able to use this checkpointer, two DynamoDB tables are needed, one to store 22 | checkpoints and the other to store writes. Below are some examples of how you 23 | can create the required tables. 
24 | 25 | ### Terraform 26 | 27 | ```hcl 28 | # Variables for table names 29 | variable "checkpoints_table_name" { 30 | type = string 31 | } 32 | 33 | variable "writes_table_name" { 34 | type = string 35 | } 36 | 37 | # Checkpoints Table 38 | resource "aws_dynamodb_table" "checkpoints_table" { 39 | name = var.checkpoints_table_name 40 | billing_mode = "PAY_PER_REQUEST" 41 | 42 | hash_key = "thread_id" 43 | range_key = "checkpoint_id" 44 | 45 | attribute { 46 | name = "thread_id" 47 | type = "S" 48 | } 49 | 50 | attribute { 51 | name = "checkpoint_id" 52 | type = "S" 53 | } 54 | } 55 | 56 | # Writes Table 57 | resource "aws_dynamodb_table" "writes_table" { 58 | name = var.writes_table_name 59 | billing_mode = "PAY_PER_REQUEST" 60 | 61 | hash_key = "thread_id_checkpoint_id_checkpoint_ns" 62 | range_key = "task_id_idx" 63 | 64 | attribute { 65 | name = "thread_id_checkpoint_id_checkpoint_ns" 66 | type = "S" 67 | } 68 | 69 | attribute { 70 | name = "task_id_idx" 71 | type = "S" 72 | } 73 | } 74 | ``` 75 | 76 | ### AWS CDK 77 | 78 | ```typescript 79 | import * as cdk from '@aws-cdk/core'; 80 | import * as dynamodb from '@aws-cdk/aws-dynamodb'; 81 | 82 | export class DynamoDbStack extends cdk.Stack { 83 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 84 | super(scope, id, props); 85 | 86 | const checkpointsTableName = 'YourCheckpointsTableName'; 87 | const writesTableName = 'YourWritesTableName'; 88 | 89 | // Checkpoints Table 90 | new dynamodb.Table(this, 'CheckpointsTable', { 91 | tableName: checkpointsTableName, 92 | billingMode: dynamodb.BillingMode.PAY_PER_REQUEST, 93 | partitionKey: { name: 'thread_id', type: dynamodb.AttributeType.STRING }, 94 | sortKey: { name: 'checkpoint_id', type: dynamodb.AttributeType.STRING }, 95 | }); 96 | 97 | // Writes Table 98 | new dynamodb.Table(this, 'WritesTable', { 99 | tableName: writesTableName, 100 | billingMode: dynamodb.BillingMode.PAY_PER_REQUEST, 101 | partitionKey: { 102 | name: 
'thread_id_checkpoint_id_checkpoint_ns', 103 | type: dynamodb.AttributeType.STRING, 104 | }, 105 | sortKey: { name: 'task_id_idx', type: dynamodb.AttributeType.STRING }, 106 | }); 107 | } 108 | } 109 | ``` 110 | 111 | ## Using the Checkpoint Saver 112 | 113 | ### Default 114 | 115 | To use the DynamoDB checkpoint saver, you only need to specify the names of 116 | the checkpoints and writes tables. In this scenario the DynamoDB client will 117 | be instantiated with the default configuration, great for running on AWS Lambda. 118 | 119 | ```typescript 120 | import { DynamoDBSaver } from '@rwai/langgraphjs-checkpoint-dynamodb'; 121 | ... 122 | const checkpointsTableName = 'YourCheckpointsTableName'; 123 | const writesTableName = 'YourWritesTableName'; 124 | 125 | const memory = new DynamoDBSaver({ 126 | checkpointsTableName, 127 | writesTableName, 128 | }); 129 | 130 | const graph = workflow.compile({ checkpointer: memory }); 131 | ``` 132 | 133 | ### Providing Client Configuration 134 | 135 | If you need to provide custom configuration to the DynamoDB client, you can 136 | pass in an object with the configuration options. Below is an example of how 137 | you can provide custom configuration. 138 | 139 | ```typescript 140 | const memory = new DynamoDBSaver({ 141 | checkpointsTableName, 142 | writesTableName, 143 | clientConfig: { 144 | region: 'us-west-2', 145 | accessKeyId: 'your-access-key-id', 146 | secretAccessKey: 'your-secret-access-key', 147 | }, 148 | }); 149 | ``` 150 | 151 | ### Custom Serde (Serialization/Deserialization) 152 | 153 | Just as with the Sqlite and MongoDB checkpoint savers, you can provide custom 154 | serialization and deserialization functions. Below is an example of how you can 155 | provide custom serialization and deserialization functions. 
156 | 157 | ```typescript 158 | import { serialize, deserialize } from '@ungap/structured-clone'; 159 | const serde = { 160 | dumpsTyped: async function (obj: unknown): Promise<[string, Uint8Array]> { 161 | if (obj instanceof Uint8Array) { 162 | return ['bytes', obj]; 163 | } else { 164 | return ['json', new TextEncoder().encode(serialize(obj))]; 165 | } 166 | }, 167 | loadsTyped: async function (type: string, data: Uint8Array | string): Promise<unknown> { 168 | switch (type) { 169 | case 'json': 170 | return deserialize( 171 | typeof data === 'string' ? data : new TextDecoder().decode(data) 172 | ); 173 | case 'bytes': 174 | return typeof data === 'string' ? new TextEncoder().encode(data) : data; 175 | default: 176 | throw new Error(`Unknown serialization type: ${type}`); 177 | } 178 | }, 179 | }; 180 | 181 | const memory = new DynamoDBSaver({ 182 | checkpointsTableName, 183 | writesTableName, 184 | serde, 185 | }); 186 | ``` 187 | -------------------------------------------------------------------------------- /bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/researchwiseai/langgraphjs-checkpoint-dynamodb/b32070be6e575feb5d364e60018506564112fa3e/bun.lockb -------------------------------------------------------------------------------- /docker-compose.test.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | dynamodb: 5 | image: amazon/dynamodb-local 6 | container_name: dynamodb-local 7 | ports: 8 | - "8000:8000" 9 | volumes: 10 | - dynamodb_data:/home/dynamodblocal/data 11 | healthcheck: 12 | test: 13 | [ 14 | "CMD-SHELL", 15 | 'if [ "$(curl -s -o /dev/null -I -w ''%{http_code}'' http://localhost:8000)" == "400" ]; then exit 0; else exit 1; fi', 16 | ] 17 | interval: 10s 18 | timeout: 2s 19 | retries: 5 20 | start_period: 5s 21 | 22 | integration-tests: 23 | build: 24 | context: . 
25 | dockerfile: Dockerfile 26 | container_name: integration-tests 27 | volumes: 28 | - ./tests/__snapshots__/:/usr/src/app/tests/__snapshots__ 29 | depends_on: 30 | dynamodb: 31 | condition: service_healthy 32 | environment: 33 | - AWS_ACCESS_KEY_ID=fakeMyKeyId 34 | - AWS_SECRET_ACCESS_KEY=fakeSecretAccessKey 35 | - AWS_REGION=local 36 | - AWS_DYNAMODB_ENDPOINT=http://dynamodb:8000 37 | 38 | volumes: 39 | dynamodb_data: 40 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import globals from 'globals'; 2 | import pluginJs from '@eslint/js'; 3 | import tseslint from 'typescript-eslint'; 4 | import markdown from '@eslint/markdown'; 5 | 6 | export default [ 7 | ...markdown.configs.recommended, 8 | { files: ['**/*.{js,mjs,cjs,ts}'] }, 9 | { languageOptions: { globals: globals.node } }, 10 | pluginJs.configs.recommended, 11 | ...tseslint.configs.recommended, 12 | { 13 | files: ['*.md'], 14 | rules: { 15 | 'no-irregular-whitespace': 'off', // This rule was causing an error while linting markdown files 16 | }, 17 | }, 18 | ]; 19 | -------------------------------------------------------------------------------- /langgraphjs-checkpoint-dynamodb.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": ".", 5 | }, 6 | ], 7 | "settings": { 8 | // **Editor Settings** 9 | "editor.tabSize": 4, 10 | "editor.formatOnSave": true, 11 | "editor.codeActionsOnSave": { 12 | "source.fixAll.eslint": "explicit", 13 | }, 14 | 15 | // **TypeScript Settings** 16 | "typescript.tsdk": "node_modules/typescript/lib", 17 | "typescript.enablePromptUseWorkspaceTsdk": true, 18 | 19 | // **ESLint Settings** 20 | "eslint.validate": ["typescript", "typescriptreact"], 21 | 22 | // **Prettier Settings** 23 | "prettier.requireConfig": true, 24 | 25 | "search.exclude": { 26 | 
"**/node_modules": true, 27 | "**/dist": true, 28 | }, 29 | "cSpell.words": ["checkpointer", "Langgraph", "langgraphjs"], 30 | }, 31 | "tasks": { 32 | "version": "2.0.0", 33 | "tasks": [ 34 | { 35 | "label": "Clean", 36 | "type": "shell", 37 | "command": "bun", 38 | "args": ["run", "clean"], 39 | "group": "build", 40 | "problemMatcher": [], 41 | }, 42 | { 43 | "label": "Build", 44 | "type": "shell", 45 | "command": "bun", 46 | "args": ["run", "build"], 47 | "group": "build", 48 | "dependsOn": "Clean", 49 | "problemMatcher": [], 50 | }, 51 | { 52 | "label": "Test", 53 | "type": "shell", 54 | "command": "bun", 55 | "args": ["run", "test"], 56 | "group": "test", 57 | "problemMatcher": [], 58 | }, 59 | { 60 | "label": "Lint", 61 | "type": "shell", 62 | "command": "bun", 63 | "args": ["run", "lint"], 64 | "group": "build", 65 | "problemMatcher": [], 66 | }, 67 | { 68 | "label": "Format", 69 | "type": "shell", 70 | "command": "bun", 71 | "args": ["run", "format"], 72 | "group": "build", 73 | "problemMatcher": [], 74 | }, 75 | ], 76 | }, 77 | "launch": { 78 | "version": "0.2.0", 79 | "configurations": [ 80 | { 81 | "name": "Launch Program", 82 | "type": "node", 83 | "request": "launch", 84 | "program": "${workspaceFolder}/src/index.ts", 85 | "preLaunchTask": "Build", 86 | "outFiles": ["${workspaceFolder}/dist/esm/**/*.js"], 87 | "cwd": "${workspaceFolder}", 88 | "console": "integratedTerminal", 89 | "internalConsoleOptions": "neverOpen", 90 | "skipFiles": ["/**"], 91 | "runtimeExecutable": "bun", 92 | }, 93 | ], 94 | }, 95 | "extensions": { 96 | "recommendations": [ 97 | "dbaeumer.vscode-eslint", 98 | "esbenp.prettier-vscode", 99 | "ms-vscode.vscode-typescript-next", 100 | "bun-lang.bun", 101 | ], 102 | }, 103 | } 104 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@rwai/langgraphjs-checkpoint-dynamodb", 3 | "version": 
"0.1.6", 4 | "license": "MIT", 5 | "description": "Implementation of a LangGraph.js CheckpointSaver that uses AWS's DynamoDB", 6 | "main": "./dist/cjs/index.js", 7 | "module": "./dist/esm/index.js", 8 | "types": "./dist/esm/index.d.ts", 9 | "exports": { 10 | ".": { 11 | "require": "./dist/cjs/index.js", 12 | "import": "./dist/esm/index.js", 13 | "types": "./dist/esm/index.d.ts" 14 | } 15 | }, 16 | "publishConfig": { 17 | "access": "public" 18 | }, 19 | "repository": { 20 | "type": "git", 21 | "url": "https://github.com/researchwiseai/langgraphjs-checkpoint-dynamodb" 22 | }, 23 | "homepage": "https://github.com/researchwiseai/langgraphjs-checkpoint-dynamodb", 24 | "author": { 25 | "name": "Will Poynter", 26 | "email": "will@researchwiseai.com" 27 | }, 28 | "keywords": [ 29 | "langgraph", 30 | "langchain", 31 | "dynamodb", 32 | "aws" 33 | ], 34 | "contributors": [ 35 | { 36 | "name": "Will Poynter", 37 | "email": "will@researchwiseai.com" 38 | }, 39 | { 40 | "name": "Jeff Laflamme", 41 | "url": "https://www.npmjs.com/~jflaflamme" 42 | }, 43 | { 44 | "name": "Quentin Georget", 45 | "url": "https://www.npmjs.com/~tinque" 46 | } 47 | ], 48 | "scripts": { 49 | "build": "bun run build:esm && bun run build:cjs", 50 | "build:esm": "tsc --project tsconfig.esm.json", 51 | "build:cjs": "tsc --project tsconfig.cjs.json", 52 | "clean": "rimraf dist", 53 | "test": "bun run test:unit", 54 | "test:unit": "bun test -t unit", 55 | "test:integration": "bun test -t integration", 56 | "lint": "eslint 'src/**/*.{ts,tsx}'", 57 | "format": "prettier --write 'src/**/*.{ts,tsx}'", 58 | "format:check": "prettier --check 'src/**/*.{ts,tsx}'" 59 | }, 60 | "engines": { 61 | "node": ">=20" 62 | }, 63 | "devDependencies": { 64 | "@eslint/js": "^9.13.1", 65 | "@eslint/markdown": "^6.2.1", 66 | "@langchain/langgraph": "^0.2.19", 67 | "@types/bun": "latest", 68 | "@types/ungap__structured-clone": "^1.2.0", 69 | "@typescript-eslint/eslint-plugin": "^8.12.2", 70 | "@typescript-eslint/parser":
"^8.12.2", 71 | "@ungap/structured-clone": "^1.2.0", 72 | "eslint": "^9.13.0", 73 | "eslint-config-prettier": "^9.1.0", 74 | "eslint-plugin-prettier": "^5.2.1", 75 | "globals": "^15.11.0", 76 | "prettier": "^3.3.3", 77 | "rimraf": "^6.0.1", 78 | "typescript-eslint": "^8.12.2" 79 | }, 80 | "peerDependencies": { 81 | "typescript": "^5.0.0" 82 | }, 83 | "dependencies": { 84 | "@aws-sdk/client-dynamodb": "^3.682.0", 85 | "@aws-sdk/lib-dynamodb": "^3.682.0", 86 | "@langchain/core": "^0.3.16", 87 | "@langchain/langgraph-checkpoint": "^0.0.11" 88 | }, 89 | "files": [ 90 | "dist", 91 | "LICENSE", 92 | "README.md" 93 | ] 94 | } 95 | -------------------------------------------------------------------------------- /src/__mocks__/DynamoDBDocument.mock.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | export class MockDynamoDBDocument { 3 | private tables: Record> = {}; 4 | 5 | constructor() { 6 | // Initialize in-memory tables 7 | this.tables = {}; 8 | } 9 | 10 | async get(params: { TableName: string; Key: Record }) { 11 | const { TableName, Key } = params; 12 | const table = this.tables[TableName] || {}; 13 | const itemKey = JSON.stringify(Key); 14 | const Item = table[itemKey]; 15 | return { Item }; 16 | } 17 | 18 | async put(params: { TableName: string; Item: any }) { 19 | const { TableName, Item } = params; 20 | 21 | this.sizeGuard(Item); 22 | 23 | const table = this.tables[TableName] || {}; 24 | const keyAttributes = this.getKeyAttributes(Item); 25 | const itemKey = JSON.stringify(keyAttributes); 26 | table[itemKey] = Item; 27 | this.tables[TableName] = table; 28 | return {}; 29 | } 30 | 31 | async query(params: { 32 | TableName: string; 33 | KeyConditionExpression: string; 34 | ExpressionAttributeValues: Record; 35 | Limit?: number; 36 | ScanIndexForward?: boolean; 37 | FilterExpression?: string; 38 | ConsistentRead?: boolean; 39 | }) { 40 | const { 41 | TableName, 42 | 
ExpressionAttributeValues, 43 | Limit, 44 | ScanIndexForward, 45 | } = params; 46 | const table = this.tables[TableName] || {}; 47 | const items = Object.values(table); 48 | 49 | const filteredItems = this.filterItems( 50 | items, 51 | ExpressionAttributeValues 52 | ); 53 | const sortedItems = this.sortItems(filteredItems, ScanIndexForward); 54 | const limitedItems = this.limitItems(sortedItems, Limit); 55 | 56 | return { Items: limitedItems }; 57 | } 58 | 59 | async batchWrite(params: { RequestItems: Record }) { 60 | for (const TableName in params.RequestItems) { 61 | this.processTableRequests( 62 | TableName, 63 | params.RequestItems[TableName] 64 | ); 65 | } 66 | return {}; 67 | } 68 | 69 | private filterItems( 70 | items: any[], 71 | ExpressionAttributeValues: Record 72 | ): any[] { 73 | return items.filter(item => { 74 | for (const key in ExpressionAttributeValues) { 75 | const attributeName = key.replace(':', ''); 76 | if (item[attributeName] !== ExpressionAttributeValues[key]) { 77 | return false; 78 | } 79 | } 80 | return true; 81 | }); 82 | } 83 | 84 | private sortItems(items: any[], ScanIndexForward?: boolean): any[] { 85 | if (ScanIndexForward === undefined) return items; 86 | 87 | return items.sort((a, b) => { 88 | const aKey = a.checkpoint_id; 89 | const bKey = b.checkpoint_id; 90 | if (ScanIndexForward) { 91 | return aKey.localeCompare(bKey); 92 | } else { 93 | return bKey.localeCompare(aKey); 94 | } 95 | }); 96 | } 97 | 98 | private limitItems(items: any[], Limit?: number): any[] { 99 | return Limit ? 
items.slice(0, Limit) : items; 100 | } 101 | 102 | private processTableRequests(TableName: string, requests: any[]) { 103 | const table = this.tables[TableName] || {}; 104 | for (const request of requests) { 105 | if (request.PutRequest) { 106 | this.processPutRequest(table, request.PutRequest.Item); 107 | } 108 | // Handle other requests if needed 109 | } 110 | this.tables[TableName] = table; 111 | } 112 | 113 | private processPutRequest(table: Record, Item: any) { 114 | this.sizeGuard(Item); 115 | const keyAttributes = this.getKeyAttributes(Item); 116 | const itemKey = JSON.stringify(keyAttributes); 117 | table[itemKey] = Item; 118 | } 119 | 120 | private calculateItemSize(item: any): number { 121 | // Serialize the item to a JSON string 122 | const serializedItem = JSON.stringify(item); 123 | 124 | // Calculate the byte length of the serialized item using UTF-8 encoding 125 | const itemSizeInBytes = new TextEncoder().encode(serializedItem).length; 126 | 127 | return itemSizeInBytes; 128 | } 129 | 130 | // Helper method to extract key attributes from an item 131 | private getKeyAttributes(item: any): Record { 132 | const keyAttributes: Record = {}; 133 | if (item.thread_id) keyAttributes.thread_id = item.thread_id; 134 | if (item.checkpoint_id) 135 | keyAttributes.checkpoint_id = item.checkpoint_id; 136 | return keyAttributes; 137 | } 138 | 139 | private sizeGuard(item: any) { 140 | const itemSizeInBytes = this.calculateItemSize(item); 141 | if (itemSizeInBytes > 409600) { 142 | throw new Error('Item size has exceeded the maximum allowed size'); 143 | } 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/__tests__/helpers/expectErrorMessageToBeThrown.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | import { expect } from 'bun:test'; 3 | 4 | export async function expectErrorMessageToBeThrown( 5 | callback: () 
=> Promise, 6 | message: string 7 | ) { 8 | try { 9 | await callback(); 10 | throw new Error("Expected function to throw an error, but it didn't"); 11 | } catch (error) { 12 | expect(error).toBeInstanceOf(Error); 13 | expect((error as any).message).toBe(message); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/__tests__/saver.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | import { describe, it, expect, beforeEach } from 'bun:test'; 3 | import { 4 | Checkpoint, 5 | CheckpointTuple, 6 | uuid6, 7 | CheckpointMetadata, 8 | } from '@langchain/langgraph-checkpoint'; 9 | import { DynamoDBSaver } from '../saver'; 10 | import { MockDynamoDBDocument } from '../__mocks__/DynamoDBDocument.mock'; 11 | import { SerializerProtocol } from '@langchain/langgraph-checkpoint'; 12 | import type { PendingWrite } from '@langchain/langgraph-checkpoint'; 13 | import { expectErrorMessageToBeThrown } from './helpers/expectErrorMessageToBeThrown'; 14 | 15 | // Mock Serializer 16 | class MockSerializer implements SerializerProtocol { 17 | dumpsTyped(value: any): [string, Uint8Array] { 18 | return ['json', new TextEncoder().encode(JSON.stringify(value))]; 19 | } 20 | 21 | async loadsTyped(type: string, value: Uint8Array | string): Promise { 22 | switch (type) { 23 | case 'json': 24 | return JSON.parse( 25 | typeof value === 'string' 26 | ? 
value 27 | : new TextDecoder().decode(value) 28 | ); 29 | default: 30 | throw new Error(`Unsupported type: ${type}`); 31 | } 32 | } 33 | } 34 | 35 | function createCheckpoint(num: number): Checkpoint { 36 | return { 37 | v: 1, 38 | id: uuid6(num), 39 | ts: `2024-04-${num + 18}T17:19:07.952Z`, 40 | channel_values: { 41 | someKey1: `someValue${num}`, 42 | }, 43 | channel_versions: { 44 | someKey2: num, 45 | }, 46 | versions_seen: { 47 | someKey3: { 48 | someKey4: num, 49 | }, 50 | }, 51 | pending_sends: [], 52 | }; 53 | } 54 | 55 | const checkpoint1 = createCheckpoint(1); 56 | const checkpoint2 = createCheckpoint(2); 57 | 58 | const config1 = { 59 | configurable: { thread_id: '1' }, 60 | } as const; 61 | 62 | describe('DynamoDBSaver', () => { 63 | describe('unit', () => { 64 | let saver: DynamoDBSaver; 65 | let mockDocClient: MockDynamoDBDocument; 66 | let serializer: MockSerializer; 67 | 68 | beforeEach(() => { 69 | mockDocClient = new MockDynamoDBDocument(); 70 | serializer = new MockSerializer(); 71 | 72 | // Initialize the DynamoDBSaver with the mock client 73 | saver = new DynamoDBSaver({ 74 | clientConfig: {}, // Empty config since we're mocking 75 | serde: serializer, 76 | checkpointsTableName: 'Checkpoints', 77 | writesTableName: 'Writes', 78 | }); 79 | 80 | // Replace the real docClient with the mock 81 | (saver as any).docClient = mockDocClient; 82 | }); 83 | 84 | it('should save and retrieve checkpoints correctly', async () => { 85 | // Get undefined checkpoint 86 | const undefinedCheckpoint = await saver.getTuple(config1); 87 | expect(undefinedCheckpoint).toBeUndefined(); 88 | 89 | // Save first checkpoint 90 | const runnableConfig = await saver.put(config1, checkpoint1, { 91 | source: 'update', 92 | step: -1, 93 | writes: null, 94 | } as CheckpointMetadata); 95 | expect(runnableConfig).toEqual({ 96 | configurable: { 97 | thread_id: '1', 98 | checkpoint_ns: '', 99 | checkpoint_id: checkpoint1.id, 100 | }, 101 | }); 102 | 103 | // Add some writes 104 | 
await saver.putWrites( 105 | { 106 | configurable: { 107 | thread_id: '1', 108 | checkpoint_ns: '', 109 | checkpoint_id: checkpoint1.id, 110 | }, 111 | }, 112 | [['bar', 'baz']] as PendingWrite[], 113 | 'foo' 114 | ); 115 | 116 | // Get first checkpoint tuple 117 | const firstCheckpointTuple = await saver.getTuple(config1); 118 | expect(firstCheckpointTuple?.config).toEqual({ 119 | configurable: { 120 | thread_id: '1', 121 | checkpoint_ns: '', 122 | checkpoint_id: checkpoint1.id, 123 | }, 124 | }); 125 | expect(firstCheckpointTuple?.checkpoint).toEqual(checkpoint1); 126 | expect(firstCheckpointTuple?.parentConfig).toBeUndefined(); 127 | expect(firstCheckpointTuple?.pendingWrites).toEqual([ 128 | ['foo', 'bar', 'baz'], 129 | ]); 130 | 131 | const config1WithId = { 132 | configurable: { 133 | thread_id: config1.configurable.thread_id, 134 | checkpoint_ns: '', 135 | checkpoint_id: '2024-04-18T17:19:07.952Z', 136 | }, 137 | }; 138 | 139 | // Save second checkpoint with parent_checkpoint_id 140 | await saver.put(config1WithId, checkpoint2, { 141 | source: 'update', 142 | step: -1, 143 | writes: null, 144 | } as CheckpointMetadata); 145 | 146 | // Verify that parentConfig is set and retrieved correctly for second checkpoint 147 | const secondCheckpointTuple = await saver.getTuple(config1); 148 | expect(secondCheckpointTuple?.parentConfig).toEqual(config1WithId); 149 | 150 | // List checkpoints 151 | const checkpointTuples: CheckpointTuple[] = []; 152 | for await (const checkpoint of saver.list(config1)) { 153 | checkpointTuples.push(checkpoint); 154 | } 155 | expect(checkpointTuples.length).toBe(2); 156 | 157 | const checkpointTuple1 = checkpointTuples[0]; 158 | const checkpointTuple2 = checkpointTuples[1]; 159 | expect(checkpointTuple1.checkpoint.ts).toBe( 160 | '2024-04-20T17:19:07.952Z' 161 | ); 162 | expect(checkpointTuple2.checkpoint.ts).toBe( 163 | '2024-04-19T17:19:07.952Z' 164 | ); 165 | }); 166 | 167 | it('should throw an error when thread_id is missing in 
getTuple', async () => { 168 | const config = { 169 | configurable: { 170 | checkpoint_id: 'checkpoint1', 171 | }, 172 | }; 173 | 174 | await expectErrorMessageToBeThrown( 175 | () => saver.getTuple(config), 176 | 'Invalid thread_id' 177 | ); 178 | }); 179 | 180 | it('should throw an error when checkpoint_id is invalid in getTuple', async () => { 181 | const config = { 182 | configurable: { 183 | thread_id: config1.configurable.thread_id, 184 | checkpoint_id: 123, // Invalid type 185 | }, 186 | }; 187 | 188 | await expectErrorMessageToBeThrown( 189 | () => saver.getTuple(config), 190 | 'Invalid checkpoint_id' 191 | ); 192 | }); 193 | 194 | it.skip('should throw an error when serializer returns unsupported type', async () => { 195 | // jest.spyOn(serializer, 'dumpsTyped').mockImplementation(() => ['unsupported', 'data']); 196 | // const config = { 197 | // configurable: { 198 | // thread_id: '1', 199 | // }, 200 | // }; 201 | // const checkpoint = { id: 'checkpoint1', data: 'some data' }; 202 | // const metadata = { source: 'update', step: -1, writes: null } as CheckpointMetadata; 203 | // await expect(saver.put(config, checkpoint, metadata)).rejects.toThrow('Unsupported type: unsupported'); 204 | }); 205 | 206 | it.skip('should handle deserialization errors gracefully in getTuple', async () => { 207 | // jest.spyOn(serializer, 'loadsTyped').mockImplementation(() => { 208 | // throw new Error('Deserialization error'); 209 | // }); 210 | // const config = { 211 | // configurable: { 212 | // thread_id: '1', 213 | // }, 214 | // }; 215 | // const checkpoint = { id: 'checkpoint1', data: 'some data' }; 216 | // const metadata = { source: 'update', step: -1, writes: null } as CheckpointMetadata; 217 | // await saver.put(config, checkpoint, metadata); 218 | // await expect(saver.getTuple(config)).rejects.toThrow('Deserialization error'); 219 | }); 220 | 221 | it('should handle checkpoints with empty data', async () => { 222 | const config = { 223 | configurable: { 224 | 
thread_id: '1', 225 | }, 226 | }; 227 | 228 | const checkpoint = { id: 'checkpoint1' } as Checkpoint; 229 | const metadata = {} as CheckpointMetadata; 230 | 231 | await saver.put(config, checkpoint, metadata); 232 | 233 | const retrieved = await saver.getTuple(config); 234 | expect(retrieved?.checkpoint).toEqual(checkpoint); 235 | expect(retrieved?.metadata).toEqual(metadata); 236 | }); 237 | 238 | it('should handle checkpoints with null values', async () => { 239 | const config = { 240 | configurable: { 241 | thread_id: '1', 242 | }, 243 | }; 244 | 245 | const checkpoint = { 246 | id: 'checkpoint1', 247 | data: null, 248 | channel_values: null, 249 | } as unknown as Checkpoint; 250 | const metadata = { source: null } as unknown as CheckpointMetadata; 251 | 252 | await saver.put(config, checkpoint, metadata); 253 | 254 | const retrieved = await saver.getTuple(config); 255 | expect(retrieved?.checkpoint).toEqual(checkpoint); 256 | expect(retrieved?.metadata).toEqual(metadata); 257 | }); 258 | 259 | it('should handle item size limit exceeded error', async () => { 260 | // Create a large data payload 261 | const largeData = 'x'.repeat(500 * 1024); // 500 KB 262 | 263 | const config = { 264 | configurable: { 265 | thread_id: '1', 266 | }, 267 | }; 268 | 269 | const checkpoint = { 270 | id: 'checkpoint1', 271 | data: largeData, 272 | } as unknown as Checkpoint; 273 | const metadata = { 274 | source: 'update', 275 | step: -1, 276 | writes: null, 277 | } as CheckpointMetadata; 278 | 279 | await expectErrorMessageToBeThrown( // await required: otherwise assertion failures inside the async helper are unhandled rejections and the test passes vacuously 280 | () => saver.put(config, checkpoint, metadata), 281 | 'Item size has exceeded the maximum allowed size' 282 | ); 283 | }); 284 | 285 | it('should handle special characters in keys and values', async () => { 286 | const config = { 287 | configurable: { 288 | thread_id: 'thread-特殊字符', 289 | }, 290 | }; 291 | 292 | const checkpoint = { 293 | id: 'checkpoint-特殊字符', 294 | data: 'data with special characters: 特殊字符', 295 | } as unknown as Checkpoint; 296 |
const metadata = { 297 | source: 'update', 298 | step: -1, 299 | writes: null, 300 | } as CheckpointMetadata; 301 | 302 | await saver.put(config, checkpoint, metadata); 303 | 304 | const retrieved = await saver.getTuple(config); 305 | expect(retrieved?.checkpoint).toEqual(checkpoint); 306 | expect(retrieved?.metadata).toEqual(metadata); 307 | }); 308 | }); 309 | }); 310 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { DynamoDBSaver } from './saver'; 2 | -------------------------------------------------------------------------------- /src/saver.ts: -------------------------------------------------------------------------------- 1 | import { 2 | DynamoDBClient, 3 | type DynamoDBClientConfig, 4 | } from '@aws-sdk/client-dynamodb'; 5 | import { DynamoDBDocument } from '@aws-sdk/lib-dynamodb'; 6 | import type { RunnableConfig } from '@langchain/core/runnables'; 7 | import { 8 | BaseCheckpointSaver, 9 | type SerializerProtocol, 10 | type CheckpointTuple, 11 | type Checkpoint, 12 | type CheckpointMetadata, 13 | type CheckpointPendingWrite, 14 | type CheckpointListOptions, 15 | type PendingWrite, 16 | } from '@langchain/langgraph-checkpoint'; 17 | import type { ValidatedConfigurable, CheckpointItem } from './types'; 18 | import { DynamoDBWriteItem, Write } from './write'; 19 | 20 | /** 21 | * DynamoDBSaver is a class that provides persistence to 22 | * Langgraph's graphs using AWS's DynamoDB. 23 | * 24 | * @class 25 | * @extends BaseCheckpointSaver 26 | * 27 | * @param {Object} params - The parameters for the constructor. 28 | * @param {DynamoDBClientConfig} [params.clientConfig] - Optional configuration for the DynamoDB client. 29 | * @param {SerializerProtocol} [params.serde] - Optional serializer protocol for serializing and deserializing data. 
30 | * @param {string} params.checkpointsTableName - The name of the DynamoDB table for storing checkpoints. 31 | * @param {string} params.writesTableName - The name of the DynamoDB table for storing writes. 32 | * 33 | * @property {DynamoDBClient} client - The DynamoDB client instance. 34 | * @property {DynamoDBDocument} docClient - The DynamoDB document client instance. 35 | * @property {string} checkpointsTableName - The name of the DynamoDB table for storing checkpoints. 36 | * @property {string} writesTableName - The name of the DynamoDB table for storing writes. 37 | * 38 | * @method getTuple - Retrieves a checkpoint tuple based on the provided configuration. 39 | * @param {RunnableConfig} config - The configuration for the runnable. 40 | * @returns {Promise} - A promise that resolves to a checkpoint tuple or undefined. 41 | * 42 | * @method list - Lists checkpoint tuples based on the provided configuration and options. 43 | * @param {RunnableConfig} config - The configuration for the runnable. 44 | * @param {CheckpointListOptions} [options] - Optional options for listing checkpoints. 45 | * @returns {AsyncGenerator} - An async generator that yields checkpoint tuples. 46 | * 47 | * @method put - Saves a checkpoint and its metadata to the DynamoDB table. 48 | * @param {RunnableConfig} config - The configuration for the runnable. 49 | * @param {Checkpoint} checkpoint - The checkpoint to save. 50 | * @param {CheckpointMetadata} metadata - The metadata associated with the checkpoint. 51 | * @returns {Promise} - A promise that resolves to the updated runnable configuration. 52 | * 53 | * @method putWrites - Saves pending writes to the DynamoDB table. 54 | * @param {RunnableConfig} config - The configuration for the runnable. 55 | * @param {PendingWrite[]} writes - The pending writes to save. 56 | * @param {string} taskId - The task ID associated with the writes. 57 | * @returns {Promise} - A promise that resolves when the writes are saved. 
58 | * 59 | * @private 60 | * @method getWritePartitionKey - Generates a partition key for a write item. 61 | * @param {Object} item - The write item. 62 | * @param {string} item.thread_id - The thread ID. 63 | * @param {string} item.checkpoint_id - The checkpoint ID. 64 | * @param {string} item.checkpoint_ns - The checkpoint namespace. 65 | * @returns {string} - The generated partition key. 66 | * 67 | * @private 68 | * @method getWriteSortKey - Generates a sort key for a write item. 69 | * @param {Object} item - The write item. 70 | * @param {string} item.task_id - The task ID. 71 | * @param {number} item.idx - The index of the write. 72 | * @returns {string} - The generated sort key. 73 | * 74 | * @private 75 | * @method validateConfigurable - Validates the configurable object. 76 | * @param {Record | undefined} configurable - The configurable object to validate. 77 | * @returns {ValidatedConfigurable} - The validated configurable object. 78 | * @throws {Error} - Throws an error if the configurable object is invalid. 
79 | */ 80 | export class DynamoDBSaver extends BaseCheckpointSaver { 81 | private client: DynamoDBClient; 82 | private docClient: DynamoDBDocument; 83 | private checkpointsTableName: string; 84 | private writesTableName: string; 85 | 86 | constructor({ 87 | clientConfig, 88 | serde, 89 | checkpointsTableName, 90 | writesTableName, 91 | }: { 92 | clientConfig?: DynamoDBClientConfig; 93 | serde?: SerializerProtocol; 94 | checkpointsTableName: string; 95 | writesTableName: string; 96 | }) { 97 | super(serde); 98 | this.client = new DynamoDBClient(clientConfig || {}); 99 | this.docClient = DynamoDBDocument.from(this.client); 100 | this.checkpointsTableName = checkpointsTableName; 101 | this.writesTableName = writesTableName; 102 | } 103 | 104 | async getTuple( 105 | config: RunnableConfig 106 | ): Promise { 107 | const getItem = async (configurable: ValidatedConfigurable) => { 108 | if (configurable.checkpoint_id != null) { 109 | // Use get 110 | const item = await this.docClient.get({ 111 | TableName: this.checkpointsTableName, 112 | Key: { 113 | thread_id: configurable.thread_id, 114 | checkpoint_id: configurable.checkpoint_id, 115 | }, 116 | }); 117 | 118 | return item.Item as CheckpointItem | undefined; 119 | } else { 120 | // Use query 121 | const result = await this.docClient.query({ 122 | TableName: this.checkpointsTableName, 123 | KeyConditionExpression: 'thread_id = :thread_id', 124 | ExpressionAttributeValues: { 125 | ':thread_id': configurable.thread_id, 126 | ...(configurable.checkpoint_ns && { 127 | ':checkpoint_ns': configurable.checkpoint_ns, 128 | }), 129 | }, 130 | ...(configurable.checkpoint_ns && { 131 | FilterExpression: 'checkpoint_ns = :checkpoint_ns', 132 | }), 133 | Limit: 1, 134 | ConsistentRead: true, 135 | ScanIndexForward: false, // Descending order 136 | }); 137 | 138 | return result.Items?.[0] as CheckpointItem | undefined; 139 | } 140 | }; 141 | 142 | const item = await getItem( 143 | this.validateConfigurable(config.configurable) 144 | 
); 145 | if (!item) { 146 | return undefined; 147 | } 148 | 149 | const checkpoint = (await this.serde.loadsTyped( 150 | item.type, 151 | item.checkpoint 152 | )) as Checkpoint; 153 | const metadata = (await this.serde.loadsTyped( 154 | item.type, 155 | item.metadata 156 | )) as CheckpointMetadata; 157 | 158 | // Fetch pending writes 159 | const writesResult = await this.docClient.query({ 160 | TableName: this.writesTableName, 161 | KeyConditionExpression: 162 | 'thread_id_checkpoint_id_checkpoint_ns = :thread_id_checkpoint_id_checkpoint_ns', 163 | ExpressionAttributeValues: { 164 | ':thread_id_checkpoint_id_checkpoint_ns': 165 | Write.getPartitionKey(item), 166 | }, 167 | }); 168 | 169 | const pendingWrites: CheckpointPendingWrite[] = []; 170 | if (writesResult.Items) { 171 | for (const writeItem of writesResult.Items as DynamoDBWriteItem[]) { 172 | const write = Write.fromDynamoDBItem(writeItem); 173 | const value = await this.serde.loadsTyped( 174 | write.type, 175 | write.value 176 | ); 177 | pendingWrites.push([write.task_id, write.channel, value]); 178 | } 179 | } 180 | 181 | return { 182 | config: { 183 | configurable: { 184 | thread_id: item.thread_id, 185 | checkpoint_ns: item.checkpoint_ns, 186 | checkpoint_id: item.checkpoint_id, 187 | }, 188 | }, 189 | checkpoint, 190 | metadata, 191 | parentConfig: item.parent_checkpoint_id 192 | ? { 193 | configurable: { 194 | thread_id: item.thread_id, 195 | checkpoint_ns: item.checkpoint_ns, 196 | checkpoint_id: item.parent_checkpoint_id, 197 | }, 198 | } 199 | : undefined, 200 | pendingWrites, 201 | }; 202 | } 203 | 204 | async *list( 205 | config: RunnableConfig, 206 | options?: CheckpointListOptions 207 | ): AsyncGenerator { 208 | const { limit, before } = options ?? 
{}; 209 | const thread_id = config.configurable?.thread_id; 210 | 211 | const expressionAttributeValues: Record = { 212 | ':thread_id': thread_id, 213 | }; 214 | let keyConditionExpression = 'thread_id = :thread_id'; 215 | 216 | if (before?.configurable?.checkpoint_id) { 217 | keyConditionExpression += 218 | ' AND checkpoint_id < :before_checkpoint_id'; 219 | expressionAttributeValues[':before_checkpoint_id'] = 220 | before.configurable.checkpoint_id; 221 | } 222 | 223 | const result = await this.docClient.query({ 224 | TableName: this.checkpointsTableName, 225 | KeyConditionExpression: keyConditionExpression, 226 | ExpressionAttributeValues: expressionAttributeValues, 227 | Limit: limit, 228 | ScanIndexForward: false, // Descending order 229 | }); 230 | 231 | if (result.Items) { 232 | for (const item of result.Items as CheckpointItem[]) { 233 | const checkpoint = (await this.serde.loadsTyped( 234 | item.type, 235 | item.checkpoint 236 | )) as Checkpoint; 237 | const metadata = (await this.serde.loadsTyped( 238 | item.type, 239 | item.metadata 240 | )) as CheckpointMetadata; 241 | 242 | yield { 243 | config: { 244 | configurable: { 245 | thread_id: item.thread_id, 246 | checkpoint_ns: item.checkpoint_ns, 247 | checkpoint_id: item.checkpoint_id, 248 | }, 249 | }, 250 | checkpoint, 251 | metadata, 252 | parentConfig: item.parent_checkpoint_id 253 | ? 
{ 254 | configurable: { 255 | thread_id: item.thread_id, 256 | checkpoint_ns: item.checkpoint_ns, 257 | checkpoint_id: item.parent_checkpoint_id, 258 | }, 259 | } 260 | : undefined, 261 | }; 262 | } 263 | } 264 | } 265 | 266 | async put( 267 | config: RunnableConfig, 268 | checkpoint: Checkpoint, 269 | metadata: CheckpointMetadata 270 | ): Promise { 271 | const { thread_id } = this.validateConfigurable(config.configurable); 272 | 273 | const [type1, serializedCheckpoint] = this.serde.dumpsTyped(checkpoint); 274 | const [type2, serializedMetadata] = this.serde.dumpsTyped(metadata); 275 | 276 | if (type1 !== type2) { 277 | throw new Error( 278 | 'Failed to serialize checkpoint and metadata to the same type.' 279 | ); 280 | } 281 | 282 | const item: CheckpointItem = { 283 | thread_id, 284 | checkpoint_ns: config.configurable?.checkpoint_ns ?? '', 285 | checkpoint_id: checkpoint.id!, 286 | parent_checkpoint_id: config.configurable?.checkpoint_id, 287 | type: type1, 288 | checkpoint: serializedCheckpoint, 289 | metadata: serializedMetadata, 290 | }; 291 | 292 | await this.docClient.put({ 293 | TableName: this.checkpointsTableName, 294 | Item: item, 295 | }); 296 | 297 | return { 298 | configurable: { 299 | thread_id: item.thread_id, 300 | checkpoint_ns: item.checkpoint_ns, 301 | checkpoint_id: item.checkpoint_id, 302 | }, 303 | }; 304 | } 305 | 306 | async putWrites( 307 | config: RunnableConfig, 308 | writes: PendingWrite[], 309 | taskId: string 310 | ): Promise { 311 | const { thread_id, checkpoint_ns, checkpoint_id } = 312 | this.validateConfigurable(config.configurable); 313 | 314 | if (checkpoint_id == null) { 315 | throw new Error('Missing checkpoint_id'); 316 | } 317 | 318 | const writeItems = writes.map((write, idx) => { 319 | const [type, serializedValue] = this.serde.dumpsTyped(write[1]); 320 | const item = new Write({ 321 | thread_id, 322 | checkpoint_ns, 323 | checkpoint_id, 324 | task_id: taskId, 325 | idx, 326 | channel: write[0], 327 | type, 328 | value: 
serializedValue, 329 | }); 330 | 331 | return { 332 | PutRequest: { 333 | Item: item.toDynamoDBItem(), 334 | }, 335 | }; 336 | }); 337 | 338 | // Batch write items 339 | const batches = []; 340 | for (let i = 0; i < writeItems.length; i += 25) { 341 | batches.push(writeItems.slice(i, i + 25)); 342 | } 343 | 344 | for (const batch of batches) { 345 | await this.docClient.batchWrite({ 346 | RequestItems: { 347 | [this.writesTableName]: batch, 348 | }, 349 | }); 350 | } 351 | } 352 | 353 | private getWritePartitionKey(item: { 354 | thread_id: string; 355 | checkpoint_id: string; 356 | checkpoint_ns: string; 357 | }): string { 358 | return `${item.thread_id}:${item.checkpoint_id}:${item.checkpoint_ns}`; 359 | } 360 | 361 | private getWriteSortKey(item: { task_id: string; idx: number }): string { 362 | return `${item.task_id}:${item.idx}`; 363 | } 364 | 365 | private validateConfigurable( 366 | configurable: Record | undefined 367 | ): ValidatedConfigurable { 368 | if (!configurable) { 369 | throw new Error('Missing configurable'); 370 | } 371 | 372 | const { thread_id, checkpoint_ns, checkpoint_id } = configurable; 373 | 374 | if (typeof thread_id !== 'string') { 375 | throw new Error('Invalid thread_id'); 376 | } 377 | 378 | if (typeof checkpoint_ns !== 'string' && checkpoint_ns !== undefined) { 379 | throw new Error('Invalid checkpoint_ns'); 380 | } 381 | 382 | if (typeof checkpoint_id !== 'string' && checkpoint_id !== undefined) { 383 | throw new Error('Invalid checkpoint_id'); 384 | } 385 | 386 | return { 387 | thread_id, 388 | checkpoint_ns: checkpoint_ns ?? '', 389 | checkpoint_id: checkpoint_id, 390 | }; 391 | } 392 | } 393 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Represents an item in the checkpoint system. 
/**
 * Represents a checkpoint record as stored in the DynamoDB checkpoints table.
 *
 * @interface CheckpointItem
 *
 * @property {string} thread_id - The unique identifier for the thread (table partition key).
 * @property {string} checkpoint_ns - The namespace of the checkpoint ('' when unnamespaced).
 * @property {string} checkpoint_id - The unique identifier for the checkpoint (table sort key).
 * @property {string} [parent_checkpoint_id] - The optional identifier for the parent checkpoint this one descends from.
 * @property {string} type - The serialization type shared by `checkpoint` and `metadata`.
 * @property {Uint8Array} checkpoint - The serialized checkpoint payload.
 * @property {Uint8Array} metadata - The serialized metadata associated with the checkpoint.
 */
export interface CheckpointItem {
    thread_id: string;
    checkpoint_ns: string;
    checkpoint_id: string;
    parent_checkpoint_id?: string;
    type: string;
    checkpoint: Uint8Array;
    metadata: Uint8Array;
}

/**
 * Represents a `configurable` record that has passed validation.
 *
 * @interface ValidatedConfigurable
 * @property {string} thread_id - The unique identifier for the thread.
 * @property {string} checkpoint_ns - The namespace for the checkpoint, defaulted to '' when absent.
 * @property {string | undefined} checkpoint_id - The unique identifier for the checkpoint, which may be undefined.
 */
export interface ValidatedConfigurable {
    thread_id: string;
    checkpoint_ns: string;
    checkpoint_id: string | undefined;
}

/**
 * Constructor properties for a {@link Write}: one pending write belonging to
 * a (thread, namespace, checkpoint, task) tuple.
 *
 * @property {string} thread_id - The ID of the owning thread.
 * @property {string} checkpoint_ns - The checkpoint namespace.
 * @property {string} checkpoint_id - The ID of the checkpoint the write belongs to.
 * @property {string} task_id - The ID of the task that produced the write.
 * @property {number} idx - Position of this write within the task's writes.
 * @property {string} channel - Channel the write targets.
 * @property {string} type - Serialization type of `value`.
 * @property {Uint8Array} value - Serialized write payload.
 */
export interface WriteProperties {
    thread_id: string;
    checkpoint_ns: string;
    checkpoint_id: string;
    task_id: string;
    idx: number;
    channel: string;
    type: string;
    value: Uint8Array;
}

/**
 * Shape of a write item as persisted in the DynamoDB writes table.
 *
 * @property {string} thread_id_checkpoint_id_checkpoint_ns - Composite partition key (joined with the Write separator).
 * @property {string} task_id_idx - Composite sort key (joined with the Write separator).
 * @property {string} channel - Channel the write targets.
 * @property {string} type - Serialization type of `value`.
 * @property {Uint8Array} value - Serialized write payload.
 */
export interface DynamoDBWriteItem {
    thread_id_checkpoint_id_checkpoint_ns: string;
    task_id_idx: string;
    channel: string;
    type: string;
    value: Uint8Array;
}
41 | * @param {number} WriteProperties.idx - The index of the write operation. 42 | * @param {string} WriteProperties.channel - The channel of the write operation. 43 | * @param {string} WriteProperties.type - The type of the write operation. 44 | * @param {Uint8Array} WriteProperties.value - The value of the write operation. 45 | * 46 | * @method toDynamoDBItem 47 | * @returns {DynamoDBWriteItem} The DynamoDB item representation of the write operation. 48 | * 49 | * @method static fromDynamoDBItem 50 | * @param {DynamoDBWriteItem} DynamoDBWriteItem - The DynamoDB item to convert. 51 | * @returns {Write} The `Write` instance created from the DynamoDB item. 52 | * 53 | * @method static getPartitionKey 54 | * @param {Object} params - The parameters to generate the partition key. 55 | * @param {string} params.thread_id - The ID of the thread. 56 | * @param {string} params.checkpoint_id - The ID of the checkpoint. 57 | * @param {string} params.checkpoint_ns - The namespace of the checkpoint. 58 | * @returns {string} The partition key. 59 | * 60 | * @method static separator 61 | * @returns {string} The separator used in partition keys and other composite keys. 
62 | */ 63 | export class Write { 64 | readonly thread_id: string; 65 | readonly checkpoint_ns: string; 66 | readonly checkpoint_id: string; 67 | readonly task_id: string; 68 | readonly idx: number; 69 | readonly channel: string; 70 | readonly type: string; 71 | readonly value: Uint8Array; 72 | 73 | constructor({ 74 | thread_id, 75 | checkpoint_ns, 76 | checkpoint_id, 77 | task_id, 78 | idx, 79 | channel, 80 | type, 81 | value, 82 | }: WriteProperties) { 83 | this.thread_id = thread_id; 84 | this.checkpoint_ns = checkpoint_ns; 85 | this.checkpoint_id = checkpoint_id; 86 | this.task_id = task_id; 87 | this.idx = idx; 88 | this.channel = channel; 89 | this.type = type; 90 | this.value = value; 91 | } 92 | 93 | toDynamoDBItem(): DynamoDBWriteItem { 94 | return { 95 | thread_id_checkpoint_id_checkpoint_ns: Write.getPartitionKey({ 96 | thread_id: this.thread_id, 97 | checkpoint_id: this.checkpoint_id, 98 | checkpoint_ns: this.checkpoint_ns, 99 | }), 100 | task_id_idx: [this.task_id, this.idx].join(Write.separator()), 101 | channel: this.channel, 102 | type: this.type, 103 | value: this.value, 104 | }; 105 | } 106 | 107 | static fromDynamoDBItem({ 108 | thread_id_checkpoint_id_checkpoint_ns, 109 | task_id_idx, 110 | channel, 111 | type, 112 | value, 113 | }: DynamoDBWriteItem): Write { 114 | const [thread_id, checkpoint_id, checkpoint_ns] = 115 | thread_id_checkpoint_id_checkpoint_ns.split(this.separator()); 116 | const [task_id, idx] = task_id_idx.split(this.separator()); 117 | return new Write({ 118 | thread_id, 119 | checkpoint_ns, 120 | checkpoint_id, 121 | task_id, 122 | idx: parseInt(idx, 10), 123 | channel, 124 | type, 125 | value, 126 | }); 127 | } 128 | 129 | static getPartitionKey({ 130 | thread_id, 131 | checkpoint_id, 132 | checkpoint_ns, 133 | }: { 134 | thread_id: string; 135 | checkpoint_id: string; 136 | checkpoint_ns: string; 137 | }): string { 138 | return [thread_id, checkpoint_id, checkpoint_ns].join( 139 | Write.separator() 140 | ); 141 | } 142 | 143 
| static separator() { 144 | return ':::'; 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /tests/__snapshots__/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/researchwiseai/langgraphjs-checkpoint-dynamodb/b32070be6e575feb5d364e60018506564112fa3e/tests/__snapshots__/.gitkeep -------------------------------------------------------------------------------- /tests/__snapshots__/integration.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Bun Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`DynamoDBSaver integration with DynamoDB and a workflow should save and load history 1`] = ` 4 | { 5 | "messages": [ 6 | HumanMessage HumanMessage { 7 | "additional_kwargs": {}, 8 | "content": "Hello from Human", 9 | "id": undefined, 10 | "lc_kwargs": { 11 | "additional_kwargs": {}, 12 | "content": "Hello from Human", 13 | "response_metadata": {}, 14 | }, 15 | "lc_namespace": [ 16 | "langchain_core", 17 | "messages", 18 | ], 19 | "lc_serializable": true, 20 | "name": undefined, 21 | "response_metadata": {}, 22 | }, 23 | AIMessage AIMessage { 24 | "additional_kwargs": {}, 25 | "content": "Hello from NodeA", 26 | "id": undefined, 27 | "invalid_tool_calls": [], 28 | "lc_kwargs": { 29 | "additional_kwargs": {}, 30 | "content": "Hello from NodeA", 31 | "invalid_tool_calls": [], 32 | "response_metadata": {}, 33 | "tool_calls": [], 34 | }, 35 | "lc_namespace": [ 36 | "langchain_core", 37 | "messages", 38 | ], 39 | "lc_serializable": true, 40 | "name": undefined, 41 | "response_metadata": {}, 42 | "tool_calls": [], 43 | "usage_metadata": undefined, 44 | }, 45 | AIMessage AIMessage { 46 | "additional_kwargs": {}, 47 | "content": "Hello from NodeB", 48 | "id": undefined, 49 | "invalid_tool_calls": [], 50 | "lc_kwargs": { 51 | "additional_kwargs": {}, 52 | "content": "Hello from NodeB", 53 | 
"invalid_tool_calls": [], 54 | "response_metadata": {}, 55 | "tool_calls": [], 56 | }, 57 | "lc_namespace": [ 58 | "langchain_core", 59 | "messages", 60 | ], 61 | "lc_serializable": true, 62 | "name": undefined, 63 | "response_metadata": {}, 64 | "tool_calls": [], 65 | "usage_metadata": undefined, 66 | }, 67 | ], 68 | } 69 | `; 70 | 71 | exports[`DynamoDBSaver integration with DynamoDB and a workflow should save and load history 2`] = ` 72 | { 73 | "checkpoint": { 74 | "channel_values": { 75 | "NodeB": "NodeB", 76 | "messages": [ 77 | HumanMessage HumanMessage { 78 | "additional_kwargs": {}, 79 | "content": "Hello from Human", 80 | "id": undefined, 81 | "lc_kwargs": { 82 | "additional_kwargs": {}, 83 | "content": "Hello from Human", 84 | "response_metadata": {}, 85 | }, 86 | "lc_namespace": [ 87 | "langchain_core", 88 | "messages", 89 | ], 90 | "lc_serializable": true, 91 | "name": undefined, 92 | "response_metadata": {}, 93 | }, 94 | AIMessage AIMessage { 95 | "additional_kwargs": {}, 96 | "content": "Hello from NodeA", 97 | "id": undefined, 98 | "invalid_tool_calls": [], 99 | "lc_kwargs": { 100 | "additional_kwargs": {}, 101 | "content": "Hello from NodeA", 102 | "invalid_tool_calls": [], 103 | "response_metadata": {}, 104 | "tool_calls": [], 105 | }, 106 | "lc_namespace": [ 107 | "langchain_core", 108 | "messages", 109 | ], 110 | "lc_serializable": true, 111 | "name": undefined, 112 | "response_metadata": {}, 113 | "tool_calls": [], 114 | "usage_metadata": undefined, 115 | }, 116 | AIMessage AIMessage { 117 | "additional_kwargs": {}, 118 | "content": "Hello from NodeB", 119 | "id": undefined, 120 | "invalid_tool_calls": [], 121 | "lc_kwargs": { 122 | "additional_kwargs": {}, 123 | "content": "Hello from NodeB", 124 | "invalid_tool_calls": [], 125 | "response_metadata": {}, 126 | "tool_calls": [], 127 | }, 128 | "lc_namespace": [ 129 | "langchain_core", 130 | "messages", 131 | ], 132 | "lc_serializable": true, 133 | "name": undefined, 134 | "response_metadata": 
{}, 135 | "tool_calls": [], 136 | "usage_metadata": undefined, 137 | }, 138 | ], 139 | }, 140 | "channel_versions": { 141 | "NodeA": 4, 142 | "NodeB": 4, 143 | "__start__": 2, 144 | "__start__:NodeA": 3, 145 | "messages": 4, 146 | }, 147 | "id": Any, 148 | "pending_sends": [], 149 | "ts": "2022-01-01T03:00:00.000Z", 150 | "v": 1, 151 | "versions_seen": { 152 | "NodeA": { 153 | "__start__:NodeA": 2, 154 | }, 155 | "NodeB": { 156 | "NodeA": 3, 157 | }, 158 | "__input__": {}, 159 | "__start__": { 160 | "__start__": 1, 161 | }, 162 | }, 163 | }, 164 | "config": { 165 | "configurable": { 166 | "checkpoint_id": Any, 167 | "checkpoint_ns": "", 168 | "thread_id": "1", 169 | }, 170 | }, 171 | "metadata": { 172 | "parents": {}, 173 | "source": "loop", 174 | "step": 2, 175 | "writes": { 176 | "NodeB": { 177 | "messages": [ 178 | AIMessage AIMessage { 179 | "additional_kwargs": {}, 180 | "content": "Hello from NodeB", 181 | "id": undefined, 182 | "invalid_tool_calls": [], 183 | "lc_kwargs": { 184 | "additional_kwargs": {}, 185 | "content": "Hello from NodeB", 186 | "invalid_tool_calls": [], 187 | "response_metadata": {}, 188 | "tool_calls": [], 189 | }, 190 | "lc_namespace": [ 191 | "langchain_core", 192 | "messages", 193 | ], 194 | "lc_serializable": true, 195 | "name": undefined, 196 | "response_metadata": {}, 197 | "tool_calls": [], 198 | "usage_metadata": undefined, 199 | }, 200 | ], 201 | }, 202 | }, 203 | }, 204 | "parentConfig": { 205 | "configurable": { 206 | "checkpoint_id": Any, 207 | "checkpoint_ns": "", 208 | "thread_id": "1", 209 | }, 210 | }, 211 | "pendingWrites": [], 212 | } 213 | `; 214 | -------------------------------------------------------------------------------- /tests/integration.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach, setSystemTime } from 'bun:test'; 2 | import { DynamoDBSaver } from '../src/saver'; 3 | import { 4 | DynamoDBClient, 5 | 
CreateTableCommand, 6 | DeleteTableCommand, 7 | DescribeTableCommand, 8 | } from '@aws-sdk/client-dynamodb'; 9 | import { CheckpointMetadata, uuid6 } from '@langchain/langgraph-checkpoint'; 10 | import { Annotation, END, START, StateGraph } from '@langchain/langgraph'; 11 | import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; 12 | 13 | setSystemTime(new Date('2022-01-01T03:00:00.000Z')); 14 | 15 | // Helper function to wait for table to become ACTIVE 16 | async function waitForTableActive(client: DynamoDBClient, tableName: string) { 17 | while (true) { 18 | const { Table } = await client.send(new DescribeTableCommand({ TableName: tableName })); 19 | if (Table?.TableStatus === 'ACTIVE') { 20 | break; 21 | } 22 | await new Promise(resolve => setTimeout(resolve, 1000)); 23 | } 24 | } 25 | 26 | async function waitForTableDeleted(client: DynamoDBClient, tableName: string) { 27 | while (true) { 28 | try { 29 | await client.send(new DescribeTableCommand({ TableName: tableName })); 30 | } catch (e) { 31 | if (e.name === 'ResourceNotFoundException') { 32 | break; 33 | } 34 | } 35 | await new Promise(resolve => setTimeout(resolve, 1000)); 36 | } 37 | } 38 | 39 | describe('DynamoDBSaver', () => { 40 | const checkpointsTableName = 'checkpoints'; 41 | const writesTableName = 'writes'; 42 | 43 | const saver = new DynamoDBSaver({ 44 | clientConfig: { 45 | endpoint: process.env.AWS_DYNAMODB_ENDPOINT, 46 | }, 47 | checkpointsTableName, 48 | writesTableName, 49 | }); 50 | 51 | describe('integration with DynamoDB', () => { 52 | beforeEach(async () => { 53 | console.log('Creating tables'); 54 | 55 | const client = new DynamoDBClient({ 56 | endpoint: process.env.AWS_DYNAMODB_ENDPOINT, 57 | }); 58 | 59 | await client.send( 60 | new CreateTableCommand({ 61 | TableName: checkpointsTableName, 62 | KeySchema: [ 63 | { AttributeName: 'thread_id', KeyType: 'HASH' }, // Partition key 64 | { AttributeName: 'checkpoint_id', KeyType: 'RANGE' }, // Sort key 65 | ], 66 | 
AttributeDefinitions: [ 67 | { AttributeName: 'thread_id', AttributeType: 'S' }, 68 | { AttributeName: 'checkpoint_id', AttributeType: 'S' }, 69 | ], 70 | BillingMode: 'PAY_PER_REQUEST', 71 | }) 72 | ); 73 | 74 | await client.send( 75 | new CreateTableCommand({ 76 | TableName: writesTableName, 77 | KeySchema: [ 78 | { AttributeName: 'thread_id_checkpoint_id_checkpoint_ns', KeyType: 'HASH' }, // Partition key 79 | { AttributeName: 'task_id_idx', KeyType: 'RANGE' }, // Sort key 80 | ], 81 | AttributeDefinitions: [ 82 | { 83 | AttributeName: 'thread_id_checkpoint_id_checkpoint_ns', 84 | AttributeType: 'S', 85 | }, 86 | { AttributeName: 'task_id_idx', AttributeType: 'S' }, 87 | ], 88 | BillingMode: 'PAY_PER_REQUEST', 89 | }) 90 | ); 91 | 92 | await waitForTableActive(client, checkpointsTableName); 93 | await waitForTableActive(client, writesTableName); 94 | 95 | console.log('Tables created'); 96 | }); 97 | 98 | afterEach(async () => { 99 | console.log('Deleting tables'); 100 | const client = new DynamoDBClient({ 101 | endpoint: process.env.AWS_DYNAMODB_ENDPOINT, 102 | }); 103 | 104 | await client.send( 105 | new DeleteTableCommand({ 106 | TableName: checkpointsTableName, 107 | }) 108 | ); 109 | 110 | await client.send( 111 | new DeleteTableCommand({ 112 | TableName: writesTableName, 113 | }) 114 | ); 115 | 116 | await waitForTableDeleted(client, checkpointsTableName); 117 | await waitForTableDeleted(client, writesTableName); 118 | 119 | console.log('Tables deleted'); 120 | }); 121 | 122 | it('should save and load checkpoints', async () => { 123 | const checkpoint = { 124 | v: 1, 125 | id: uuid6(-1), 126 | ts: '2024-04-19T17:19:07.952Z', 127 | channel_values: { 128 | someKey1: 'someValue1', 129 | }, 130 | channel_versions: { 131 | someKey2: 1, 132 | }, 133 | versions_seen: { 134 | someKey3: { 135 | someKey4: 1, 136 | }, 137 | }, 138 | pending_sends: [], 139 | }; 140 | 141 | await saver.put({ configurable: { thread_id: '1' } }, checkpoint, { 142 | source: 'update', 143 | 
step: -1, 144 | writes: null, 145 | } as CheckpointMetadata); 146 | 147 | const loadedCheckpoint = await saver.getTuple({ 148 | configurable: { thread_id: '1' }, 149 | }); 150 | 151 | expect(loadedCheckpoint).not.toBeUndefined(); 152 | expect(loadedCheckpoint?.checkpoint.id).toEqual(checkpoint.id); 153 | }); 154 | 155 | it('should save and load writes', async () => { 156 | const checkpoint = { 157 | v: 1, 158 | id: uuid6(-1), 159 | ts: '2024-04-19T17:19:07.952Z', 160 | channel_values: { 161 | someKey1: 'someValue1', 162 | }, 163 | channel_versions: { 164 | someKey2: 1, 165 | }, 166 | versions_seen: { 167 | someKey3: { 168 | someKey4: 1, 169 | }, 170 | }, 171 | pending_sends: [], 172 | }; 173 | 174 | const writes = { 175 | writes: [ 176 | { 177 | id: '1', 178 | v: 1, 179 | ts: '2024-04-19T17:19:07.952Z', 180 | channel_values: { 181 | someKey1: 'someValue1', 182 | }, 183 | channel_versions: { 184 | someKey2: 1, 185 | }, 186 | versions_seen: { 187 | someKey3: { 188 | someKey4: 1, 189 | }, 190 | }, 191 | pending_sends: [], 192 | }, 193 | ], 194 | }; 195 | 196 | await saver.put({ configurable: { thread_id: '1' } }, checkpoint, { 197 | source: 'update', 198 | step: -1, 199 | writes, 200 | parents: {}, 201 | } as CheckpointMetadata); 202 | 203 | const loadedWrites = await saver.getTuple({ 204 | configurable: { thread_id: '1' }, 205 | }); 206 | 207 | expect(loadedWrites).not.toBeUndefined(); 208 | expect(loadedWrites?.metadata?.writes).toEqual(writes); 209 | }); 210 | 211 | describe('and a workflow', () => { 212 | it('should save and load history', async () => { 213 | const AgentState = Annotation.Root({ 214 | messages: Annotation({ 215 | reducer: (x, y) => x.concat(y), 216 | default: () => [], 217 | }), 218 | }); 219 | 220 | const workflow = new StateGraph(AgentState) 221 | .addNode('NodeA', async () => { 222 | return { 223 | messages: [ 224 | new AIMessage({ 225 | content: 'Hello from NodeA', 226 | }), 227 | ], 228 | }; 229 | }) 230 | .addNode('NodeB', async () => { 231 
| return { 232 | messages: [ 233 | new AIMessage({ 234 | content: 'Hello from NodeB', 235 | }), 236 | ], 237 | }; 238 | }); 239 | 240 | workflow.addEdge(START, 'NodeA'); 241 | workflow.addEdge('NodeA', 'NodeB'); 242 | workflow.addEdge('NodeB', END); 243 | 244 | const graph = workflow.compile({ 245 | checkpointer: saver, 246 | }); 247 | 248 | const config = { configurable: { thread_id: '1' } }; 249 | 250 | let loadedTuple = await saver.getTuple(config); 251 | expect(loadedTuple).toBeUndefined(); 252 | 253 | const answer = await graph.invoke( 254 | { 255 | messages: [ 256 | new HumanMessage({ 257 | content: 'Hello from Human', 258 | }), 259 | ], 260 | }, 261 | config 262 | ); 263 | 264 | expect(answer).toMatchSnapshot(); 265 | 266 | loadedTuple = await saver.getTuple(config); 267 | 268 | loadedTuple?.parentConfig; 269 | 270 | expect(loadedTuple).toMatchSnapshot({ 271 | checkpoint: { 272 | id: expect.any(String), 273 | }, 274 | config: { 275 | configurable: { 276 | checkpoint_id: expect.any(String), 277 | }, 278 | }, 279 | parentConfig: { 280 | configurable: { 281 | checkpoint_id: expect.any(String), 282 | }, 283 | }, 284 | }); 285 | }); 286 | }); 287 | }); 288 | }); 289 | -------------------------------------------------------------------------------- /tsconfig.cjs.json: -------------------------------------------------------------------------------- 1 | // tsconfig.cjs.json 2 | { 3 | "extends": "./tsconfig.json", 4 | "compilerOptions": { 5 | "outDir": "./dist/cjs", 6 | "module": "CommonJS", 7 | "target": "ES2022", 8 | "declaration": true, 9 | "declarationDir": "./dist/cjs", 10 | "sourceMap": true, 11 | "moduleResolution": "Node", 12 | "importHelpers": true, 13 | "noEmit": false, 14 | "rootDir": "./src", 15 | "preserveSymlinks": true 16 | }, 17 | "include": ["src/**/*"], 18 | "exclude": ["node_modules", "dist", "**/__tests__/**", "**/__mocks__/**"] 19 | } 20 | -------------------------------------------------------------------------------- /tsconfig.esm.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist/esm", 5 | "module": "ESNext", 6 | "target": "ES2022", 7 | "declaration": true, 8 | "declarationDir": "./dist/esm", 9 | "sourceMap": true, 10 | "moduleResolution": "Node", 11 | "importHelpers": true, 12 | "noEmit": false, 13 | "rootDir": "./src", 14 | "preserveSymlinks": true 15 | }, 16 | "include": ["src/**/*"], 17 | "exclude": ["node_modules", "dist", "**/__tests__/**", "**/__mocks__/**"] 18 | } 19 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "moduleResolution": "Node", 5 | "strict": true, 6 | "esModuleInterop": true, 7 | "skipLibCheck": true, 8 | "forceConsistentCasingInFileNames": true, 9 | "allowSyntheticDefaultImports": true, 10 | "resolveJsonModule": true, 11 | "rootDir": "./src", 12 | "preserveSymlinks": true, 13 | "noEmit": true // Prevent emitting when using this config directly 14 | }, 15 | "include": ["src/**/*"], 16 | "exclude": ["node_modules", "dist"] 17 | } 18 | --------------------------------------------------------------------------------