├── .eslintignore ├── .eslintrc.js ├── .github └── workflows │ └── test.yml ├── .gitignore ├── .npmignore ├── .prettierignore ├── .prettierrc ├── .travis.yml ├── LICENSE.md ├── README.md ├── docs ├── .nojekyll ├── assets │ ├── highlight.css │ ├── main.js │ ├── search.js │ └── style.css ├── classes │ ├── collectionnotfoundexception.html │ ├── configurationexception.html │ ├── conflictexception.html │ ├── indexnotfoundexception.html │ ├── invalidbatchreplacedeletedescriptorexception.html │ ├── invalidcompositeconditionexception.html │ ├── invalididexception.html │ ├── invalidindexedfieldvalueexception.html │ ├── invalidparentidexception.html │ ├── invalidqueryexception.html │ ├── invalidupdatesexception.html │ └── invalidupdatevalueexception.html ├── functions │ ├── batchFindByIds.html │ ├── batchReplaceDelete.html │ ├── createContext.html │ ├── deleteById.html │ ├── deleteChildById.html │ ├── find.html │ ├── findById.html │ ├── findByIdWithChildren.html │ ├── findChildById.html │ ├── findChildren.html │ ├── insert.html │ ├── replace.html │ ├── updateById.html │ └── updateChildById.html ├── index.html ├── interfaces │ ├── accesspattern.html │ ├── childcollection.html │ ├── collectionlayout.html │ └── rootcollection.html ├── modules.html └── types │ ├── AccessPatternOptions.html │ ├── AddValueChange.html │ ├── AndCondition.html │ ├── AppendDeleteSetChange.html │ ├── BatchDeleteDescriptor.html │ ├── BatchFindByIdDescriptor.html │ ├── BatchFindByIdsResponse.html │ ├── BatchReplaceDeleteDescriptor.html │ ├── BatchReplaceDeleteResponse.html │ ├── BatchReplaceDescriptor.html │ ├── BeginsWithCondition.html │ ├── BetweenCondition.html │ ├── ChangesUpdates.html │ ├── Collection.html │ ├── ComparisonCondition.html │ ├── CompositeCondition.html │ ├── ConditionValue.html │ ├── ContainsCondition.html │ ├── Context.html │ ├── DocumentWithId.html │ ├── DynamoDBSet.html │ ├── DynamoDBType.html │ ├── EqCondition.html │ ├── ExistsCondition.html │ ├── FindByIdWithChildrenResult.html │ 
├── FindChildrenOptions.html │ ├── FindChildrenResults.html │ ├── FindOptions.html │ ├── FindQuery.html │ ├── FindResults.html │ ├── GtCondition.html │ ├── GtEqCondition.html │ ├── InCondition.html │ ├── Key.html │ ├── KeyPath.html │ ├── KeyPathsAndClause.html │ ├── KeyRangeExpression.html │ ├── LtCondition.html │ ├── LtEqCondition.html │ ├── NormaliserFunction.html │ ├── NotCondition.html │ ├── NotEqCondition.html │ ├── OperationUpdates.html │ ├── OrCondition.html │ ├── ParseElement.html │ ├── PrimaryIndexLayout.html │ ├── QueryOperator.html │ ├── RemoveChange.html │ ├── SecondaryIndexLayout.html │ ├── SetChange.html │ ├── SetValuesDocument.html │ ├── TypeCondition.html │ ├── Updates.html │ └── WrappedDocument.html ├── esbuild.config.js ├── examples ├── adjacency_list.ts ├── basic_children.ts ├── hierarchy.ts └── simplestore │ └── simple_store.ts ├── jest-dynalite-config.js ├── jest.config.ts ├── package-lock.json ├── package.json ├── src ├── base │ ├── access_pattern.ts │ ├── coerce_error.ts │ ├── collection.ts │ ├── collection_definition.ts │ ├── common.ts │ ├── conditions.ts │ ├── conditions_parser.test.ts │ ├── conditions_parser.ts │ ├── conditions_types.ts │ ├── exceptions.ts │ ├── expression_util.test.ts │ ├── expression_util.ts │ ├── layout.ts │ ├── lexo.test.ts │ ├── lexo.ts │ ├── mappers.test.ts │ ├── mappers.ts │ ├── new_id.ts │ ├── util.test.ts │ └── util.ts ├── context │ ├── context_types.ts │ ├── extract_keys.test.ts │ ├── extract_keys.ts │ ├── index.ts │ ├── validators.test.ts │ └── validators.ts ├── debug │ ├── debugDynamo.ts │ └── index.ts ├── index.ts └── operations │ ├── batch_find_by_ids.ts │ ├── batch_replace_delete.ts │ ├── batch_utils.ts │ ├── delete_by_id.test.ts │ ├── delete_by_id.ts │ ├── delete_child_by_id.test.ts │ ├── delete_child_by_id.ts │ ├── e2e.test.ts │ ├── find.test.ts │ ├── find.ts │ ├── find_by_id.test.ts │ ├── find_by_id.ts │ ├── find_by_id_with_children.test.ts │ ├── find_by_id_with_children.ts │ ├── find_child_by_id.test.ts 
│ ├── find_child_by_id.ts │ ├── find_children.test.ts │ ├── find_children.ts │ ├── index.ts │ ├── insert.test.ts │ ├── insert.ts │ ├── replace.test.ts │ ├── replace.ts │ ├── transact_find_by_ids.ts │ ├── transact_write.test.ts │ ├── transact_write.ts │ ├── update_by_id.test.ts │ ├── update_by_id.ts │ └── update_child_by_id.ts ├── testutil ├── debug_tests.ts ├── dynamo_mock.ts ├── local_dynamo_db.ts └── setupBeforeEnv.js ├── tsconfig.json └── typedoc.json /.eslintignore: -------------------------------------------------------------------------------- 1 | examples/**/*.ts 2 | dist/**/*.ts 3 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: '@typescript-eslint/parser', // Specifies the ESLint parser 3 | extends: [ 4 | 'plugin:@typescript-eslint/recommended', // Uses the recommended rules from the @typescript-eslint/eslint-plugin 5 | ], 6 | parserOptions: { 7 | ecmaVersion: 2018, // Allows for the parsing of modern ECMAScript features 8 | sourceType: 'module', // Allows for the use of imports 9 | }, 10 | rules: { 11 | // Place to specify ESLint rules. Can be used to overwrite rules specified from the extended configs 12 | // e.g. 
"@typescript-eslint/explicit-function-return-type": "off", 13 | '@typescript-eslint/explicit-module-boundary-types': 'error', 14 | '@typescript-eslint/no-explicit-any': 'error', 15 | }, 16 | }; 17 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Node.js CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | node-version: [18.x, 20.x, 22.x] 14 | steps: 15 | - uses: actions/checkout@v3 16 | - name: Use Node.js ${{matrix.node-version}} 17 | uses: actions/setup-node@v3 18 | with: 19 | node-version: ${{ matrix.node-version }} 20 | - run: sudo docker run --name dynamodb -d -p 8000:8000 amazon/dynamodb-local -jar DynamoDBLocal.jar -port 8000 21 | - run: npm ci 22 | - env: 23 | LOCAL_DYNAMODB_ENDPOINT: http://localhost:8000 24 | AWS_ACCESS_KEY_ID: dummy 25 | AWS_SECRET_ACCESS_KEY: dummy 26 | run: LOCAL_DYNAMODB_ENDPOINT=http://localhost:8000 npm run test 27 | 28 | - run: npm run lint 29 | - run: npm run prettier:check 30 | - run: npm run build 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist/ 3 | tsconfig.tsbuildinfo 4 | coverage/ 5 | .vscode/ -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | src/ 2 | base/ 3 | context/ 4 | debug/ 5 | operations/ 6 | testutil/ 7 | index.ts 8 | dynamo_mock.ts 9 | examples/ 10 | tsconfig.* 11 | .prettierrc 12 | coverage/ 13 | .vscode/ 14 | *.test.ts 15 | *.test.js 16 | .travis.yml 17 | docs/ 18 | .babelrc 19 | .eslintrc.js 20 | typedoc.json 21 | .eslintignore 22 | .prettierignore 23 | rollup.config.js 24 | 
jest.config.ts 25 | jest-dynalite-config.js 26 | esbuild.config.js 27 | .github/ 28 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | node_modules/ 3 | docs/ 4 | 5 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "tabWidth": 2, 4 | "trailingComma": "es5", 5 | "arrowParens": "avoid", 6 | "printWidth": 120 7 | } -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: 'node_js' 2 | arch: amd64 3 | os: linux 4 | dist: focal # Default "xenial" (Ubuntu 16.04) has unsupported glibc for nodejs builds available with nvm 5 | node_js: 6 | - 14 7 | - 16 8 | - 18 9 | - 19 10 | script: 11 | - npm run build 12 | - npm test 13 | - npm run prettier:check 14 | - npm run lint 15 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- 1 | TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
-------------------------------------------------------------------------------- /docs/assets/highlight.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --light-hl-0: #795E26; 3 | --dark-hl-0: #DCDCAA; 4 | --light-hl-1: #000000; 5 | --dark-hl-1: #D4D4D4; 6 | --light-hl-2: #A31515; 7 | --dark-hl-2: #CE9178; 8 | --light-hl-3: #008000; 9 | --dark-hl-3: #6A9955; 10 | --light-hl-4: #0000FF; 11 | --dark-hl-4: #569CD6; 12 | --light-hl-5: #0070C1; 13 | --dark-hl-5: #4FC1FF; 14 | --light-hl-6: #001080; 15 | --dark-hl-6: #9CDCFE; 16 | --light-hl-7: #AF00DB; 17 | --dark-hl-7: #C586C0; 18 | --light-hl-8: #000000; 19 | --dark-hl-8: #C8C8C8; 20 | --light-hl-9: #098658; 21 | --dark-hl-9: #B5CEA8; 22 | --light-hl-10: #267F99; 23 | --dark-hl-10: #4EC9B0; 24 | --light-code-background: #FFFFFF; 25 | --dark-code-background: #1E1E1E; 26 | } 27 | 28 | @media (prefers-color-scheme: light) { :root { 29 | --hl-0: var(--light-hl-0); 30 | --hl-1: var(--light-hl-1); 31 | --hl-2: var(--light-hl-2); 32 | --hl-3: var(--light-hl-3); 33 | --hl-4: var(--light-hl-4); 34 | --hl-5: var(--light-hl-5); 35 | --hl-6: var(--light-hl-6); 36 | --hl-7: var(--light-hl-7); 37 | --hl-8: var(--light-hl-8); 38 | --hl-9: var(--light-hl-9); 39 | --hl-10: var(--light-hl-10); 40 | --code-background: var(--light-code-background); 41 | } } 42 | 43 | @media (prefers-color-scheme: dark) { :root { 44 | --hl-0: var(--dark-hl-0); 45 | --hl-1: var(--dark-hl-1); 46 | --hl-2: var(--dark-hl-2); 47 | --hl-3: var(--dark-hl-3); 48 | --hl-4: var(--dark-hl-4); 49 | --hl-5: var(--dark-hl-5); 50 | --hl-6: var(--dark-hl-6); 51 | --hl-7: var(--dark-hl-7); 52 | --hl-8: var(--dark-hl-8); 53 | --hl-9: var(--dark-hl-9); 54 | --hl-10: var(--dark-hl-10); 55 | --code-background: var(--dark-code-background); 56 | } } 57 | 58 | :root[data-theme='light'] { 59 | --hl-0: var(--light-hl-0); 60 | --hl-1: var(--light-hl-1); 61 | --hl-2: var(--light-hl-2); 62 | --hl-3: var(--light-hl-3); 63 | --hl-4: 
var(--light-hl-4); 64 | --hl-5: var(--light-hl-5); 65 | --hl-6: var(--light-hl-6); 66 | --hl-7: var(--light-hl-7); 67 | --hl-8: var(--light-hl-8); 68 | --hl-9: var(--light-hl-9); 69 | --hl-10: var(--light-hl-10); 70 | --code-background: var(--light-code-background); 71 | } 72 | 73 | :root[data-theme='dark'] { 74 | --hl-0: var(--dark-hl-0); 75 | --hl-1: var(--dark-hl-1); 76 | --hl-2: var(--dark-hl-2); 77 | --hl-3: var(--dark-hl-3); 78 | --hl-4: var(--dark-hl-4); 79 | --hl-5: var(--dark-hl-5); 80 | --hl-6: var(--dark-hl-6); 81 | --hl-7: var(--dark-hl-7); 82 | --hl-8: var(--dark-hl-8); 83 | --hl-9: var(--dark-hl-9); 84 | --hl-10: var(--dark-hl-10); 85 | --code-background: var(--dark-code-background); 86 | } 87 | 88 | .hl-0 { color: var(--hl-0); } 89 | .hl-1 { color: var(--hl-1); } 90 | .hl-2 { color: var(--hl-2); } 91 | .hl-3 { color: var(--hl-3); } 92 | .hl-4 { color: var(--hl-4); } 93 | .hl-5 { color: var(--hl-5); } 94 | .hl-6 { color: var(--hl-6); } 95 | .hl-7 { color: var(--hl-7); } 96 | .hl-8 { color: var(--hl-8); } 97 | .hl-9 { color: var(--hl-9); } 98 | .hl-10 { color: var(--hl-10); } 99 | pre, code { background: var(--code-background); } 100 | -------------------------------------------------------------------------------- /esbuild.config.js: -------------------------------------------------------------------------------- 1 | const esbuild = require('esbuild'); 2 | const { nodeExternalsPlugin } = require('esbuild-node-externals'); 3 | 4 | esbuild.build({ 5 | entryPoints: ['./src/index.ts'], 6 | outdir: './dist', 7 | bundle: true, 8 | minify: false, 9 | platform: 'node', 10 | target: 'node12', 11 | plugins: [nodeExternalsPlugin()], 12 | sourcemap: true, 13 | }); 14 | -------------------------------------------------------------------------------- /examples/adjacency_list.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { 3 | createContext, 4 | insert, 5 
| findById, 6 | find, 7 | findByIdWithChildren, 8 | } from '../src'; 9 | import { ChildCollection, RootCollection } from '../src'; 10 | 11 | const DYNAMODB_ENDPOINT = 12 | process.env.DYNAMODB_ENDPOINT || 'http://localhost:8000'; 13 | 14 | /** 15 | * in order to use this layout, you will need to start up DynamoDB local 16 | * and provision the table 17 | * 18 | * docker run -p 8000:8000 amazon/dynamodb-local 19 | * 20 | * aws dynamodb create-table \ 21 | * --endpoint-url http://localhost:8000 \ 22 | * --table-name global \ 23 | * --attribute-definitions AttributeName=id,AttributeType=S AttributeName=collection,AttributeType=S \ 24 | * AttributeName=gs2p,AttributeType=S AttributeName=gs2s,AttributeType=S \ 25 | * AttributeName=gs3p,AttributeType=S AttributeName=gs3s,AttributeType=S \ 26 | * --key-schema KeyType=HASH,AttributeName=id KeyType=SORT,AttributeName=collection \ 27 | * --billing-mode PAY_PER_REQUEST \ 28 | * --global-secondary-indexes 'IndexName=gs1,KeySchema=[{KeyType="HASH",AttributeName=collection},{KeyType=SORT,AttributeName=id}],Projection={ProjectionType=ALL}' \ 29 | * 'IndexName=gs2,KeySchema=[{KeyType="HASH",AttributeName="gs2p"},{KeyType=SORT,AttributeName=gs2s}],Projection={ProjectionType=ALL}' \ 30 | * 'IndexName=gs3,KeySchema=[{KeyType="HASH",AttributeName="gs3p"},{KeyType=SORT,AttributeName=gs3s}],Projection={ProjectionType=ALL}' 31 | */ 32 | 33 | const globalTableLayout = { 34 | tableName: 'global', 35 | primaryKey: { 36 | partitionKey: 'id', 37 | sortKey: 'collection', 38 | }, 39 | findKeys: [ 40 | { indexName: 'gs2', partitionKey: 'gs2p', sortKey: 'gs2s' }, 41 | { indexName: 'gs3', partitionKey: 'gs3p', sortKey: 'gs3s' }, 42 | ], 43 | }; 44 | 45 | // The collection names are prefixed to specifically 46 | // order the objects in the child collections (see below). 
47 | const USERS_COLLECTION = '1users'; 48 | const CREDENTIALS_COLLECTION = '0credentials'; 49 | const ADDRESSES_COLLECTION = '2addresses'; 50 | 51 | const usersCollection: RootCollection = { 52 | name: USERS_COLLECTION, 53 | layout: globalTableLayout, 54 | }; 55 | 56 | const credentialsCollection: ChildCollection = { 57 | type: 'child', 58 | name: CREDENTIALS_COLLECTION, 59 | parentCollectionName: USERS_COLLECTION, 60 | layout: globalTableLayout, 61 | foreignKeyPath: ['userId'], 62 | accessPatterns: [ 63 | { indexName: 'gs2', partitionKeys: [['userId']], sortKeys: [['type']] }, // get credential by userId and type 64 | ], 65 | }; 66 | 67 | const addressesCollection: ChildCollection = { 68 | name: ADDRESSES_COLLECTION, 69 | type: 'child', 70 | layout: globalTableLayout, 71 | foreignKeyPath: ['userId'], 72 | parentCollectionName: USERS_COLLECTION, 73 | }; 74 | 75 | async function main(): Promise { 76 | const ddb = new DynamoDBClient({ 77 | endpoint: DYNAMODB_ENDPOINT, 78 | region: process.env.AWS_DEFAULT_REGION || 'us-east-1', 79 | }); 80 | const ctx = createContext(ddb, [ 81 | usersCollection, 82 | credentialsCollection, 83 | addressesCollection, 84 | ]); 85 | 86 | // ** Our example contains three access patterns: ** 87 | // 1. Get a user by ID 88 | // 2. Get a user's credential by ID and type 89 | // 3. Get a user with their addresses objects 90 | // 4. 
Get a user with their credentials objects 91 | // 92 | // This example demonstrates how we retrieve the latter two using `findByIdWithChildren` 93 | 94 | const user1 = await insert(ctx, USERS_COLLECTION, { 95 | type: 'administrator', 96 | group: 'red', 97 | name: 'Joyce Mannheim', 98 | }); 99 | console.log('created user 1', user1); 100 | const address1 = await insert(ctx, ADDRESSES_COLLECTION, { 101 | userId: user1._id, 102 | type: 'home', 103 | line1: '10 Home Place', 104 | suburb: 'HomeTown', 105 | }); 106 | const address2 = await insert(ctx, ADDRESSES_COLLECTION, { 107 | userId: user1._id, 108 | type: 'work', 109 | line1: '78 Work Close', 110 | suburb: 'CityVillage', 111 | }); 112 | const credentials1 = await insert(ctx, CREDENTIALS_COLLECTION, { 113 | userId: user1._id, 114 | type: 'password', 115 | password: 'testpassword', 116 | }); 117 | const credentials2 = await insert(ctx, CREDENTIALS_COLLECTION, { 118 | userId: user1._id, 119 | type: 'google', 120 | googleUser: 'joyce.mannheim@gmail.com', 121 | }); 122 | 123 | const user2 = await insert(ctx, USERS_COLLECTION, { 124 | type: 'guest', 125 | group: 'blue', 126 | name: 'Bill Langham', 127 | }); 128 | console.log('created user 2', user2); 129 | const address3 = await insert(ctx, ADDRESSES_COLLECTION, { 130 | userId: user2._id, 131 | type: 'home', 132 | line1: '62 Local St', 133 | suburb: 'Daceyville', 134 | }); 135 | const credentials3 = await insert(ctx, CREDENTIALS_COLLECTION, { 136 | userId: user2._id, 137 | type: 'password', 138 | password: '12345', 139 | }); 140 | const credentials4 = await insert(ctx, CREDENTIALS_COLLECTION, { 141 | userId: user2._id, 142 | type: 'google', 143 | googleUser: 'billham@gmail.com', 144 | }); 145 | 146 | // 1. Get a user by ID 147 | console.log('user 1', await findById(ctx, USERS_COLLECTION, user1._id)); 148 | 149 | // 2. 
Get a user's credential by ID and type 150 | console.log( 151 | 'user 2 password', 152 | await find(ctx, CREDENTIALS_COLLECTION, { 153 | userId: user2._id, 154 | type: 'password', 155 | }) 156 | ); 157 | 158 | // In the next two examples, we use the adjacency list to get the user and the child objects 159 | // in the same result. To ensure the user object comes first in both cases if there is pagination, 160 | // we have deliberately named the collections so the `users` object is ordered between the `addresses` 161 | // and `credentials` objects. We then control the index scan direction accordingly. 162 | 163 | // 3. Get a user with their addresses objects 164 | const result1 = await findByIdWithChildren( 165 | ctx, 166 | USERS_COLLECTION, 167 | user1._id, 168 | [ADDRESSES_COLLECTION], 169 | undefined, 170 | { 171 | scanForward: true, 172 | } 173 | ); // scan forward to get user object first 174 | console.log( 175 | 'user1 addresses', 176 | (result1.root as any)?.name, 177 | result1.children?.[ADDRESSES_COLLECTION], 178 | JSON.stringify(result1) 179 | ); 180 | 181 | // 4. 
Get a user with their credentials objects 182 | const result2 = await findByIdWithChildren( 183 | ctx, 184 | USERS_COLLECTION, 185 | user2._id, 186 | [CREDENTIALS_COLLECTION], 187 | undefined, 188 | { 189 | scanForward: false, 190 | } 191 | ); // scan backward to get user object first 192 | console.log( 193 | 'user2 credentials', 194 | (result2.root as any).name, 195 | result2.children?.[CREDENTIALS_COLLECTION], 196 | JSON.stringify(result2) 197 | ); 198 | } 199 | 200 | main().catch((error) => console.error('error', error)); 201 | -------------------------------------------------------------------------------- /examples/basic_children.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { 3 | Collection, 4 | createContext, 5 | insert, 6 | find, 7 | findChildren, 8 | findChildById, 9 | deleteChildById, 10 | updateById, 11 | ChildCollection, 12 | RootCollection, 13 | } from '../src'; 14 | 15 | const DYNAMODB_ENDPOINT = 16 | process.env.DYNAMODB_ENDPOINT || 'http://localhost:8000'; 17 | 18 | /** 19 | * in order to use this layout, you will need to start up DynamoDB local 20 | * and provision the table 21 | * 22 | * docker run -p 8000:8000 amazon/dynamodb-local 23 | * 24 | * aws dynamodb create-table \ 25 | * --endpoint-url http://localhost:8000 \ 26 | * --table-name global \ 27 | * --attribute-definitions AttributeName=id,AttributeType=S AttributeName=collection,AttributeType=S \ 28 | * AttributeName=gs2p,AttributeType=S AttributeName=gs2s,AttributeType=S \ 29 | * AttributeName=gs3p,AttributeType=S AttributeName=gs3s,AttributeType=S \ 30 | * --key-schema KeyType=HASH,AttributeName=id KeyType=SORT,AttributeName=collection \ 31 | * --billing-mode PAY_PER_REQUEST \ 32 | * --global-secondary-indexes 'IndexName=gs1,KeySchema=[{KeyType="HASH",AttributeName=collection},{KeyType=SORT,AttributeName=id}],Projection={ProjectionType=ALL}' \ 33 | * 
'IndexName=gs2,KeySchema=[{KeyType="HASH",AttributeName="gs2p"},{KeyType=SORT,AttributeName=gs2s}],Projection={ProjectionType=ALL}' \ 34 | * 'IndexName=gs3,KeySchema=[{KeyType="HASH",AttributeName="gs3p"},{KeyType=SORT,AttributeName=gs3s}],Projection={ProjectionType=ALL}' 35 | */ 36 | 37 | const globalTableLayout = { 38 | tableName: 'global', 39 | primaryKey: { 40 | partitionKey: 'id', 41 | sortKey: 'collection', 42 | }, 43 | findKeys: [ 44 | { indexName: 'gs2', partitionKey: 'gs2p', sortKey: 'gs2s' }, 45 | { indexName: 'gs3', partitionKey: 'gs3p', sortKey: 'gs3s' }, 46 | ], 47 | }; 48 | 49 | const usersCollection: RootCollection = { 50 | name: 'users', 51 | layout: globalTableLayout, 52 | accessPatterns: [ 53 | { 54 | indexName: 'gs2', 55 | partitionKeys: [['group'], ['type']], 56 | sortKeys: [['name']], 57 | }, 58 | ], 59 | }; 60 | 61 | const addressesCollection: ChildCollection = { 62 | name: 'addresses', 63 | type: 'child', 64 | layout: globalTableLayout, 65 | foreignKeyPath: ['userId'], 66 | parentCollectionName: 'users', 67 | }; 68 | 69 | async function main(): Promise { 70 | const ddb = new DynamoDBClient({ 71 | endpoint: DYNAMODB_ENDPOINT, 72 | region: process.env.AWS_DEFAULT_REGION || 'us-east-1', 73 | }); 74 | const ctx = createContext(ddb, [usersCollection, addressesCollection]); 75 | 76 | const user1 = await insert(ctx, 'users', { 77 | type: 'administrator', 78 | group: 'red', 79 | name: 'Joyce Mannheim', 80 | }); 81 | console.log('created user 1', user1); 82 | const address1 = await insert(ctx, 'addresses', { 83 | userId: user1._id, 84 | type: 'home', 85 | line1: '10 Home Place', 86 | suburb: 'HomeTown', 87 | }); 88 | const address2 = await insert(ctx, 'addresses', { 89 | userId: user1._id, 90 | type: 'work', 91 | line1: '78 Work Close', 92 | suburb: 'CityVillage', 93 | }); 94 | 95 | const user2 = await insert(ctx, 'users', { 96 | type: 'guest', 97 | group: 'blue', 98 | name: 'Bill Langham', 99 | }); 100 | console.log('created user 2', user2); 101 | 
const address3 = await insert(ctx, 'addresses', { 102 | userId: user2._id, 103 | type: 'home', 104 | line1: '62 Local St', 105 | suburb: 'Daceyville', 106 | }); 107 | 108 | console.log( 109 | 'all addresses for for user 1', 110 | await findChildren(ctx, 'addresses', user1._id) 111 | ); 112 | console.log( 113 | 'all addresses for for user 2', 114 | await findChildren(ctx, 'addresses', user2._id) 115 | ); 116 | 117 | console.log( 118 | 'address 2 retrieved', 119 | await findChildById(ctx, 'addresses', address2._id, user1._id) 120 | ); 121 | 122 | const deleteAddress1 = await deleteChildById( 123 | ctx, 124 | 'addresses', 125 | address1._id, 126 | user1._id 127 | ); 128 | console.log('deleted address 1', deleteAddress1); 129 | 130 | const newObject = await updateById(ctx, 'users', user1._id, { 131 | $setValues: { 132 | name: 'James Mannheim', 133 | password: 'test-password', 134 | group: 'red', 135 | type: 'blue', 136 | }, 137 | }); 138 | console.log('updated user 1', newObject); 139 | } 140 | 141 | main().catch((error) => console.error('error', error)); 142 | -------------------------------------------------------------------------------- /examples/hierarchy.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { Collection, createContext, insert, find, Context } from '../src'; 3 | 4 | const DYNAMODB_ENDPOINT = 5 | process.env.DYNAMODB_ENDPOINT || 'http://localhost:8000'; 6 | 7 | /** 8 | * in order to use this layout, you will need to start up DynamoDB local 9 | * and provision the table 10 | * 11 | * docker run -p 8000:8000 amazon/dynamodb-local 12 | * 13 | * aws dynamodb create-table \ 14 | * --endpoint-url http://localhost:8000 \ 15 | * --table-name global \ 16 | * --attribute-definitions AttributeName=id,AttributeType=S AttributeName=collection,AttributeType=S \ 17 | * AttributeName=gs2p,AttributeType=S AttributeName=gs2s,AttributeType=S \ 18 | * 
AttributeName=gs3p,AttributeType=S AttributeName=gs3s,AttributeType=S \ 19 | * --key-schema KeyType=HASH,AttributeName=id KeyType=SORT,AttributeName=collection \ 20 | * --billing-mode PAY_PER_REQUEST \ 21 | * --global-secondary-indexes 'IndexName=gs1,KeySchema=[{KeyType="HASH",AttributeName=collection},{KeyType=SORT,AttributeName=id}],Projection={ProjectionType=ALL}' \ 22 | * 'IndexName=gs2,KeySchema=[{KeyType="HASH",AttributeName="gs2p"},{KeyType=SORT,AttributeName=gs2s}],Projection={ProjectionType=ALL}' \ 23 | * 'IndexName=gs3,KeySchema=[{KeyType="HASH",AttributeName="gs3p"},{KeyType=SORT,AttributeName=gs3s}],Projection={ProjectionType=ALL}' 24 | */ 25 | 26 | const globalTableLayout = { 27 | tableName: 'global', 28 | primaryKey: { 29 | partitionKey: 'id', 30 | sortKey: 'collection', 31 | }, 32 | listAllKey: { 33 | indexName: 'gs1', 34 | partitionKey: 'collection', 35 | sortKey: 'id', 36 | }, 37 | findKeys: [ 38 | { 39 | indexName: 'gs2', 40 | partitionKey: 'gs2p', 41 | sortKey: 'gs2s', 42 | }, 43 | { 44 | indexName: 'gs3', 45 | partitionKey: 'gs3p', 46 | sortKey: 'gs3s', 47 | }, 48 | ], 49 | }; 50 | 51 | const lowercaseNormaliser = (keyPath: string[], value: string) => 52 | value.toLowerCase(); 53 | 54 | const locationsCollection: Collection = { 55 | name: 'locations', 56 | layout: globalTableLayout, 57 | accessPatterns: [ 58 | { 59 | indexName: 'gs2', 60 | partitionKeys: [['country']], 61 | sortKeys: [['state'], ['city'], ['suburb'], ['street']], 62 | options: { stringNormalizer: lowercaseNormaliser }, 63 | }, 64 | { 65 | indexName: 'gs3', 66 | partitionKeys: [], 67 | sortKeys: [['description', 'name']], 68 | options: { stringNormalizer: lowercaseNormaliser }, 69 | }, 70 | ], 71 | }; 72 | 73 | type Location = { 74 | description: { 75 | name: string; 76 | }; 77 | country: string; 78 | state: string; 79 | city: string; 80 | suburb: string; 81 | street: string; 82 | }; 83 | 84 | const addLocation = (ctx: Context, location: Location) => 85 | insert(ctx, 'locations', 
location); 86 | 87 | const createLocation = ( 88 | country: string, 89 | state: string, 90 | city: string, 91 | suburb: string, 92 | street: string, 93 | name: string 94 | ) => ({ 95 | country, 96 | state, 97 | city, 98 | suburb, 99 | street, 100 | description: { 101 | name, 102 | }, 103 | }); 104 | 105 | const populateLocations = (ctx: Context) => { 106 | addLocation( 107 | ctx, 108 | createLocation( 109 | 'AU', 110 | 'NSW', 111 | 'Sydney', 112 | 'Sydney', 113 | 'Bennelong Point', 114 | 'Sydney Opera House' 115 | ) 116 | ); 117 | addLocation( 118 | ctx, 119 | createLocation( 120 | 'AU', 121 | 'NSW', 122 | 'Sydney', 123 | 'Bondi', 124 | 'Campbell Parade', 125 | 'Bondi Beach' 126 | ) 127 | ); 128 | addLocation( 129 | ctx, 130 | createLocation( 131 | 'AU', 132 | 'NSW', 133 | 'Sydney', 134 | 'Sydney', 135 | 'Wheat Road', 136 | 'Darling Harbour' 137 | ) 138 | ); 139 | addLocation( 140 | ctx, 141 | createLocation('AU', 'NSW', 'Sydney', 'Manly', 'The Corso', 'The Corso') 142 | ); 143 | addLocation( 144 | ctx, 145 | createLocation( 146 | 'AU', 147 | 'NSW', 148 | 'Byron Bay', 149 | 'Byron Bay', 150 | 'Cape Byron Walking Track', 151 | 'Smokey Cape Lighthouse' 152 | ) 153 | ); 154 | addLocation( 155 | ctx, 156 | createLocation( 157 | 'AU', 158 | 'NSW', 159 | 'Clifton', 160 | 'Clifton', 161 | 'Lawrence Hargrave Dr', 162 | 'Seacliff Bridge' 163 | ) 164 | ); 165 | addLocation( 166 | ctx, 167 | createLocation( 168 | 'AU', 169 | 'VIC', 170 | 'Melbourne', 171 | 'Melbourne', 172 | 'Queen Street', 173 | 'Queen Victoria Market' 174 | ) 175 | ); 176 | addLocation( 177 | ctx, 178 | createLocation( 179 | 'AU', 180 | 'VIC', 181 | 'Melbourne', 182 | 'Melbourne', 183 | 'Flinders Street', 184 | 'Federation Square' 185 | ) 186 | ); 187 | addLocation( 188 | ctx, 189 | createLocation( 190 | 'AU', 191 | 'VIC', 192 | 'Melbourne', 193 | 'South Yarra', 194 | 'Birdwood Avenue', 195 | 'Royal Botanic Gardens Victoria' 196 | ) 197 | ); 198 | addLocation( 199 | ctx, 200 | createLocation( 201 | 'AU', 
202 | 'VIC', 203 | 'Melbourne', 204 | 'Belgrave', 205 | 'Old Monbulk Road', 206 | 'Puffing Billy Railway' 207 | ) 208 | ); 209 | }; 210 | 211 | async function main() { 212 | const ddb = new DynamoDBClient({ 213 | endpoint: DYNAMODB_ENDPOINT, 214 | region: 'us-east-1', 215 | }); 216 | const ctx = createContext(ddb, [locationsCollection]); 217 | 218 | await populateLocations(ctx); 219 | 220 | const allSydneyLocations = await find(ctx, 'locations', { 221 | country: 'AU', 222 | state: 'NSW', 223 | city: 'SYDNEY', 224 | }); 225 | console.log('All Sydney locations', allSydneyLocations.items); 226 | 227 | const allVictoriaLocations = await find(ctx, 'locations', { 228 | country: 'AU', 229 | state: 'VIC', 230 | }); 231 | console.log('All Victoria locations', allVictoriaLocations.items); 232 | 233 | const findSLocations = await find(ctx, 'locations', { 234 | 'description.name': 'S', 235 | }); 236 | console.log("locations beginning with 'S':", findSLocations.items); 237 | } 238 | 239 | main(); 240 | -------------------------------------------------------------------------------- /examples/simplestore/simple_store.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'; 2 | import { 3 | Collection, 4 | createContext, 5 | insert, 6 | findById, 7 | find, 8 | deleteById, 9 | replace, 10 | } from '../../src'; 11 | 12 | const DYNAMODB_ENDPOINT = 13 | process.env.DYNAMODB_ENDPOINT || 'http://localhost:8000'; 14 | const REGION = process.env.REGION || 'fake'; 15 | 16 | /** 17 | * in order to use this layout, you will need to start up DynamoDB local 18 | * and provision the table 19 | * 20 | * docker run -p 8000:8000 amazon/dynamodb-local 21 | * 22 | * 23 | aws dynamodb create-table \ 24 | --endpoint-url http://localhost:8000 \ 25 | --table-name global \ 26 | --attribute-definitions AttributeName=id,AttributeType=S AttributeName=collection,AttributeType=S \ 27 | 
AttributeName=gs2p,AttributeType=S AttributeName=gs2s,AttributeType=S \ 28 | AttributeName=gs3p,AttributeType=S AttributeName=gs3s,AttributeType=S \ 29 | --key-schema KeyType=HASH,AttributeName=id KeyType=SORT,AttributeName=collection \ 30 | --billing-mode PAY_PER_REQUEST \ 31 | --global-secondary-indexes 'IndexName=gs1,KeySchema=[{KeyType="HASH",AttributeName=collection},{KeyType=SORT,AttributeName=id}],Projection={ProjectionType=ALL}' \ 32 | 'IndexName=gs2,KeySchema=[{KeyType="HASH",AttributeName="gs2p"},{KeyType=SORT,AttributeName=gs2s}],Projection={ProjectionType=ALL}' \ 33 | 'IndexName=gs3,KeySchema=[{KeyType="HASH",AttributeName="gs3p"},{KeyType=SORT,AttributeName=gs3s}],Projection={ProjectionType=ALL}' 34 | */ 35 | 36 | const globalTableLayout = { 37 | tableName: 'global', 38 | primaryKey: { 39 | partitionKey: 'id', 40 | sortKey: 'collection', 41 | }, 42 | findKeys: [ 43 | { 44 | indexName: 'gs2', 45 | partitionKey: 'gs2p', 46 | sortKey: 'gs2s', 47 | }, 48 | { 49 | indexName: 'gs3', 50 | partitionKey: 'gs3p', 51 | sortKey: 'gs3s', 52 | }, 53 | ], 54 | }; 55 | 56 | const usersCollection: Collection = { 57 | name: 'users', 58 | layout: globalTableLayout, 59 | accessPatterns: [ 60 | { indexName: 'gs2', partitionKeys: [], sortKeys: [['email']] }, 61 | { 62 | indexName: 'gs3', 63 | partitionKeys: [['team', 'id']], 64 | sortKeys: [['team', 'employeeCode']], 65 | }, 66 | ], 67 | }; 68 | 69 | const postsCollection: Collection = { 70 | name: 'posts', 71 | layout: globalTableLayout, 72 | accessPatterns: [ 73 | { indexName: 'gs2', partitionKeys: [['userId']], sortKeys: [] }, 74 | ], 75 | }; 76 | 77 | async function main(): Promise { 78 | const ddb = new DynamoDBClient({ endpoint: DYNAMODB_ENDPOINT, region: REGION }); 79 | const ctx = createContext(ddb, [usersCollection, postsCollection]); 80 | 81 | console.log(`Connecting at endpoint ${ddb.config.endpoint}`); 82 | const tables = await ddb.send(new ListTablesCommand({})); 83 | console.log('tables: ', tables.TableNames); 
84 | 85 | const user1 = await insert(ctx, 'users', { 86 | name: 'Anayah Dyer', 87 | email: 'anayahd@example.com', 88 | team: { id: 'team-code-1', employeeCode: 'AC-1' }, 89 | }); 90 | const user2 = await insert(ctx, 'users', { 91 | name: 'Ruairidh Hughes', 92 | email: 'ruairidhh@example.com', 93 | team: { id: 'team-code-1', employeeCode: 'AC-2' }, 94 | }); 95 | const user3 = await insert(ctx, 'users', { 96 | name: 'Giles Major', 97 | email: 'giles@example.com', 98 | team: { id: 'team-code-2', employeeCode: 'GT-5' }, 99 | }); 100 | const user4 = await insert(ctx, 'users', { 101 | name: 'Lance Alles', 102 | email: 'lance@example.com', 103 | team: { id: 'team-code-2', employeeCode: 'GT-6' }, 104 | }); 105 | 106 | console.log('inserted users', [user1, user2, user3, user4]); 107 | 108 | const post1 = await insert(ctx, 'posts', { 109 | userId: user1._id, 110 | title: 'How to cook an apple pie', 111 | }); 112 | 113 | const post2 = await insert(ctx, 'posts', { 114 | userId: user1._id, 115 | title: 'Cooking for a dinner party', 116 | }); 117 | 118 | const post3 = await insert(ctx, 'posts', { 119 | userId: user2._id, 120 | title: 'My first blog post', 121 | }); 122 | 123 | console.log('inserted posts', [post1, post2, post3]); 124 | 125 | const foundUser2 = await findById(ctx, 'users', user2._id); 126 | const notFoundUser4 = await findById(ctx, 'users', 'not-found-id'); 127 | 128 | console.log('user 2', foundUser2); 129 | console.log('non-existent user 4', notFoundUser4); 130 | 131 | const postsByUser1 = await find(ctx, 'posts', { userId: user1._id }); 132 | console.log('posts by user 1', postsByUser1.items); 133 | 134 | const deletedItem = await deleteById(ctx, 'posts', post2._id); 135 | console.log('deleted post #2', deletedItem); 136 | 137 | const updatedPost = await replace(ctx, 'posts', { 138 | ...post3, 139 | title: 'Updated first post', 140 | }); 141 | console.log('updated post #3', updatedPost); 142 | 143 | console.log( 144 | 'posts by user 2', 145 | await find(ctx, 
'posts', { userId: user2._id }) 146 | ); 147 | 148 | const emailSearch = await find(ctx, 'users', { email: 'anayah' }, undefined, { 149 | filter: { 150 | 'team.employeeCode': { $beginsWith: 'AC' }, 151 | profile: { $exists: false }, 152 | }, 153 | }); 154 | console.log('email search results', emailSearch); 155 | 156 | // Find all users in a team (access pattern 2) 157 | const usersInTeam2 = await find(ctx, 'users', { 'team.id': 'team-code-2' }); 158 | console.log('team 2 users', usersInTeam2); 159 | 160 | // Find user by teamId and employeeCode (access pattern 2) 161 | const userByEmployeeCode = await find(ctx, 'users', { 162 | 'team.id': 'team-code-1', 163 | 'team.employeeCode': 'AC-2', 164 | }); 165 | console.log('user by employee code', userByEmployeeCode); 166 | } 167 | 168 | main().catch((err) => console.error('error during execution', err)); 169 | -------------------------------------------------------------------------------- /jest-dynalite-config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | tables: [ 3 | { 4 | TableName: 'general', 5 | KeySchema: [ 6 | { AttributeName: 'pk', KeyType: 'HASH' }, 7 | { AttributeName: 'sk', KeyType: 'RANGE' }, 8 | ], 9 | AttributeDefinitions: [ 10 | { AttributeName: 'pk', AttributeType: 'S' }, 11 | { AttributeName: 'sk', AttributeType: 'S' }, 12 | { AttributeName: 'gpk1', AttributeType: 'S' }, 13 | { AttributeName: 'gsk1', AttributeType: 'S' }, 14 | { AttributeName: 'gpk2', AttributeType: 'S' }, 15 | { AttributeName: 'gsk2', AttributeType: 'S' }, 16 | { AttributeName: 'gpk3', AttributeType: 'S' }, 17 | { AttributeName: 'gsk3', AttributeType: 'S' }, 18 | { AttributeName: 'gpk4', AttributeType: 'S' }, 19 | { AttributeName: 'gsk4', AttributeType: 'S' }, 20 | ], 21 | GlobalSecondaryIndexes: [ 22 | { 23 | IndexName: 'gsi1', 24 | KeySchema: [ 25 | { AttributeName: 'gpk1', KeyType: 'HASH' }, 26 | { AttributeName: 'gsk1', KeyType: 'RANGE' }, 27 | ], 28 | Projection: { 
ProjectionType: 'ALL' }, 29 | }, 30 | { 31 | IndexName: 'gsi2', 32 | KeySchema: [ 33 | { AttributeName: 'gpk2', KeyType: 'HASH' }, 34 | { AttributeName: 'gsk2', KeyType: 'RANGE' }, 35 | ], 36 | Projection: { ProjectionType: 'ALL' }, 37 | }, 38 | { 39 | IndexName: 'gsi3', 40 | KeySchema: [ 41 | { AttributeName: 'gpk3', KeyType: 'HASH' }, 42 | { AttributeName: 'gsk3', KeyType: 'RANGE' }, 43 | ], 44 | Projection: { ProjectionType: 'ALL' }, 45 | }, 46 | { 47 | IndexName: 'gsi4', 48 | KeySchema: [ 49 | { AttributeName: 'gpk4', KeyType: 'HASH' }, 50 | { AttributeName: 'gsk4', KeyType: 'RANGE' }, 51 | ], 52 | Projection: { ProjectionType: 'ALL' }, 53 | }, 54 | ], 55 | ProvisionedThroughput: { ReadCapacityUnits: 1, WriteCapacityUnits: 1 }, 56 | }, 57 | ], 58 | }; 59 | 60 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * For a detailed explanation regarding each configuration property and type check, visit: 3 | * https://jestjs.io/docs/en/configuration.html 4 | */ 5 | 6 | export default { 7 | // All imported modules in your tests should be mocked automatically 8 | // automock: false, 9 | 10 | // Stop running tests after `n` failures 11 | // bail: 0, 12 | 13 | // The directory where Jest should store its cached dependency information 14 | // cacheDirectory: "/private/var/folders/gq/cj48n3r134z05_b8pmtcks6r0000gn/T/jest_dx", 15 | 16 | // Automatically clear mock calls and instances between every test 17 | clearMocks: true, 18 | 19 | // Indicates whether the coverage information should be collected while executing the test 20 | // collectCoverage: false, 21 | 22 | // An array of glob patterns indicating a set of files for which coverage information should be collected 23 | // collectCoverageFrom: undefined, 24 | 25 | // The directory where Jest should output its coverage files 26 | coverageDirectory: "coverage", 27 | 28 | // An array 
of regexp pattern strings used to skip coverage collection 29 | // coveragePathIgnorePatterns: [ 30 | // "/node_modules/" 31 | // ], 32 | 33 | // Indicates which provider should be used to instrument code for coverage 34 | coverageProvider: "v8", 35 | 36 | // A list of reporter names that Jest uses when writing coverage reports 37 | // coverageReporters: [ 38 | // "json", 39 | // "text", 40 | // "lcov", 41 | // "clover" 42 | // ], 43 | 44 | // An object that configures minimum threshold enforcement for coverage results 45 | // coverageThreshold: undefined, 46 | 47 | // A path to a custom dependency extractor 48 | // dependencyExtractor: undefined, 49 | 50 | // Make calling deprecated APIs throw helpful error messages 51 | // errorOnDeprecated: false, 52 | 53 | // Force coverage collection from ignored files using an array of glob patterns 54 | // forceCoverageMatch: [], 55 | 56 | // A path to a module which exports an async function that is triggered once before all test suites 57 | // globalSetup: undefined, 58 | 59 | // A path to a module which exports an async function that is triggered once after all test suites 60 | // globalTeardown: undefined, 61 | 62 | // A set of global variables that need to be available in all test environments 63 | // globals: {}, 64 | 65 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 
66 | // maxWorkers: "50%", 67 | 68 | // An array of directory names to be searched recursively up from the requiring module's location 69 | // moduleDirectories: [ 70 | // "node_modules" 71 | // ], 72 | 73 | // An array of file extensions your modules use 74 | // moduleFileExtensions: [ 75 | // "js", 76 | // "json", 77 | // "jsx", 78 | // "ts", 79 | // "tsx", 80 | // "node" 81 | // ], 82 | 83 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 84 | // moduleNameMapper: {}, 85 | 86 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 87 | // modulePathIgnorePatterns: [], 88 | 89 | // Activates notifications for test results 90 | // notify: false, 91 | 92 | // An enum that specifies notification mode. Requires { notify: true } 93 | // notifyMode: "failure-change", 94 | 95 | // A preset that is used as a base for Jest's configuration 96 | // preset: undefined, 97 | 98 | // Run tests from one or more projects 99 | // projects: undefined, 100 | 101 | // Use this configuration option to add custom reporters to Jest 102 | // reporters: undefined, 103 | 104 | // Automatically reset mock state between every test 105 | // resetMocks: false, 106 | 107 | // Reset the module registry before running each individual test 108 | // resetModules: false, 109 | 110 | // A path to a custom resolver 111 | // resolver: undefined, 112 | 113 | // Automatically restore mock state between every test 114 | // restoreMocks: false, 115 | 116 | // The root directory that Jest should scan for tests and modules within 117 | // rootDir: undefined, 118 | 119 | // A list of paths to directories that Jest should use to search for files in 120 | // roots: [ 121 | // "" 122 | // ], 123 | 124 | // Allows you to use a custom runner instead of Jest's default test runner 125 | // runner: "jest-runner", 126 | 127 | // The paths to modules that run some code 
to configure or set up the testing environment before each test 128 | setupFiles: ['./testutil/setupBeforeEnv.js'], 129 | 130 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 131 | // setupFilesAfterEnv: [], 132 | 133 | // The number of seconds after which a test is considered as slow and reported as such in the results. 134 | // slowTestThreshold: 5, 135 | 136 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 137 | // snapshotSerializers: [], 138 | 139 | // The test environment that will be used for testing 140 | testEnvironment: "node", 141 | 142 | // Options that will be passed to the testEnvironment 143 | // testEnvironmentOptions: {}, 144 | 145 | // Adds a location field to test results 146 | // testLocationInResults: false, 147 | 148 | // The glob patterns Jest uses to detect test files 149 | // testMatch: [ 150 | // "**/__tests__/**/*.[jt]s?(x)", 151 | // "**/?(*.)+(spec|test).[tj]s?(x)" 152 | // ], 153 | 154 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 155 | // testPathIgnorePatterns: [ 156 | // "/node_modules/" 157 | // ], 158 | 159 | // The regexp pattern or array of patterns that Jest uses to detect test files 160 | // testRegex: [], 161 | 162 | // This option allows the use of a custom results processor 163 | // testResultsProcessor: undefined, 164 | 165 | // This option allows use of a custom test runner 166 | // testRunner: "jasmine2", 167 | 168 | // This option sets the URL for the jsdom environment. 
It is reflected in properties such as location.href 169 | // testURL: "http://localhost", 170 | 171 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 172 | // timers: "real", 173 | 174 | // A map from regular expressions to paths to transformers 175 | transform: { 176 | '^.+\\.(t|j)sx?$': '@swc/jest' 177 | }, 178 | 179 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 180 | // transformIgnorePatterns: [ 181 | // "/node_modules/", 182 | // "\\.pnp\\.[^\\/]+$" 183 | // ], 184 | 185 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 186 | // unmockedModulePathPatterns: undefined, 187 | 188 | // Indicates whether each individual test should be reported during the run 189 | // verbose: undefined, 190 | 191 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 192 | // watchPathIgnorePatterns: [], 193 | 194 | // Whether to use watchman for file crawling 195 | // watchman: true, 196 | }; 197 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "dynaglue", 4 | "version": "2.0.3", 5 | "description": "dynaglue", 6 | "main": "dist/index.js", 7 | "types": "dist/index.d.ts", 8 | "scripts": { 9 | "compile:build": "tsup src/index.ts -d dist --dts --format esm,cjs --platform node --target node16", 10 | "build": "run-p compile:build", 11 | "prepublishOnly": "npm run build", 12 | "doc": "typedoc --tsconfig tsconfig.json src/index.ts", 13 | "lint": "eslint '**/*.ts'", 14 | "test": "jest", 15 | "test-local": "LOCAL_DYNAMODB_ENDPOINT=http://localhost:8000 npm run test", 16 | "prettier:check": "prettier -c 
**/*.ts", 17 | "prettier:write": "prettier --write **/*.ts", 18 | "test:watch": "jest --watch" 19 | }, 20 | "engines": { 21 | "node": ">=16" 22 | }, 23 | "author": "Chris Armstrong", 24 | "license": "Apache-2.0", 25 | "devDependencies": { 26 | "@aws-sdk/client-dynamodb": "^3.53.0", 27 | "@aws-sdk/util-dynamodb": "^3.53.0", 28 | "@swc/core": "^1.3.67", 29 | "@swc/jest": "^0.2.26", 30 | "@typescript-eslint/eslint-plugin": "^5.61.0", 31 | "@typescript-eslint/parser": "^5.61.0", 32 | "dynalite": "^3.2.1", 33 | "esbuild": "^0.16.13", 34 | "esbuild-node-externals": "^1.4.1", 35 | "eslint": "^8.44.0", 36 | "jest": "^29.6.0", 37 | "jest-dynalite": "^3.4.1", 38 | "npm-run-all": "^4.1.5", 39 | "prettier": "^2.3.2", 40 | "ts-node": "^8.10.2", 41 | "tsup": "^7.1.0", 42 | "typedoc": "^0.24.8", 43 | "typescript": "~4.8.4" 44 | }, 45 | "peerDependencies": { 46 | "@aws-sdk/client-dynamodb": "^3.53.0", 47 | "@aws-sdk/util-dynamodb": "^3.53.0" 48 | }, 49 | "dependencies": { 50 | "@types/debug": "^4.1.7", 51 | "@types/jest": "^29.5.2", 52 | "@types/lodash": "^4.14.172", 53 | "@types/node": "^12.20.21", 54 | "@types/object-hash": "^3.0.6", 55 | "@types/validator": "^13.6.3", 56 | "@types/verror": "^1.10.5", 57 | "debug": "^4.3.4", 58 | "lodash": "^4.17.21", 59 | "object-hash": "^3.0.0", 60 | "validator": "^13.6.0", 61 | "verror": "^1.10.0" 62 | }, 63 | "repository": { 64 | "url": "https://github.com/chris-armstrong/dynaglue.git" 65 | }, 66 | "prettier": { 67 | "trailingComma": "es5", 68 | "tabWidth": 2, 69 | "semi": true, 70 | "singleQuote": true 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/base/access_pattern.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * A path to a key in a document. Each path element should be 3 | * a string. 
For example, a field called 'userId' at the top level is 4 | * simply `['userId']`, while a nested field at 'description.title[0]' 5 | * is `['description', 'title', '0']` 6 | */ 7 | export type KeyPath = string[]; 8 | 9 | /** @internal */ 10 | export const describeKeyPath = (keyPath: KeyPath): string => keyPath.join('.'); 11 | 12 | /** 13 | * A function used to normalise a key value before it is packed into 14 | * an index value. 15 | * 16 | * This is useful when you want to index your fields differently to how 17 | * they're stored e.g. lowercase values before they are put in the 18 | * index. 19 | */ 20 | export type NormaliserFunction = (path: KeyPath, value: string) => string; 21 | 22 | /** 23 | * Options for an access pattern. 24 | */ 25 | export type AccessPatternOptions = { 26 | /** A normaliser for values stored in the access pattern's index */ 27 | stringNormalizer?: NormaliserFunction; 28 | }; 29 | 30 | /** 31 | * An **access pattern** defines how to copy values from the entities 32 | * in a collection into an index. They are used to permit indexed 33 | * queries with the [[find]] operation. 34 | * 35 | * In order to index values, you must first have defined a GSI in 36 | * your [[CollectionLayout|collection's layout]]. 37 | */ 38 | export interface AccessPattern { 39 | /** The name of the index in your [[CollectionLayout|collection's layout]] */ 40 | indexName: string; 41 | /** The [[KeyPath|key paths]] to extract and store in the partition key. 42 | * 43 | * Key paths specified for a partition key must be present in documents 44 | * stored in the associated [[Collection]]. 45 | */ 46 | partitionKeys: KeyPath[]; 47 | /** The [[KeyPath|key paths]] to extract and store in the sort key. 48 | * 49 | * Leave this value unspecified if your GSI does not have a sort key.
If it does 50 | * but you don't want a sort key, populate it with an empty array 51 | */ 52 | sortKeys?: KeyPath[]; 53 | /** Key paths [[KeyPath|key paths]] specified as required must be present in documents 54 | * stored in the associated [[Collection]]. 55 | */ 56 | requiredPaths?: KeyPath[]; 57 | /** Options for the index, such as string normalisers */ 58 | options?: AccessPatternOptions; 59 | } 60 | 61 | export const describeAccessPattern = ({ 62 | indexName, 63 | partitionKeys, 64 | sortKeys, 65 | }: AccessPattern): string => 66 | `[access pattern index=${indexName} partition=${partitionKeys.join(',')} ` + 67 | `${sortKeys ? `sort=${sortKeys.join(',')}` : ''}]`; 68 | -------------------------------------------------------------------------------- /src/base/coerce_error.ts: -------------------------------------------------------------------------------- 1 | import VError from 'verror'; 2 | 3 | /** 4 | * Given a caught exception, coerce it to an Error type through 5 | * introspection. If it isn't a subclass of `Error`, wraps it 6 | * in an InternalError 7 | * @param error the error or not 8 | * @returns an Error object 9 | */ 10 | export const coerceError = (error: unknown): Error => { 11 | if (error instanceof Error) return error; 12 | return new VError( 13 | { name: 'unknown.error', info: { error } }, 14 | 'An error of an unknown type occurred' 15 | ); 16 | }; 17 | -------------------------------------------------------------------------------- /src/base/collection.ts: -------------------------------------------------------------------------------- 1 | import { CollectionLayout } from './layout'; 2 | import { AccessPattern, KeyPath } from './access_pattern'; 3 | 4 | export interface CommonCollection { 5 | /** 6 | * The name of the collection. Use this 7 | * when creating, updating or retrieving values. 
8 | */ 9 | name: string; 10 | /** 11 | * The layout, which maps out how 12 | * to assemble values for storage and 13 | * retrieval so they are indexed 14 | * correctly. 15 | */ 16 | layout: CollectionLayout; 17 | /** 18 | * Access patterns define how to retrieve 19 | * values on keys other than a document's 20 | * `_id` field 21 | */ 22 | accessPatterns?: AccessPattern[]; 23 | /** 24 | * The optional key path of an attribute 25 | * that will be copied to the TTL attribute 26 | * on collection documents. 27 | * 28 | * The value must be either: 29 | * * a Date object 30 | * * a string in ISO8601 format 31 | * * a number, in milliseconds since the Epoch 32 | * 33 | * (any other value will be ignored) 34 | * 35 | * The layout must also define `ttlAttribute` when you 36 | * specify `ttlKeyPath`, otherwise 37 | * an exception will be thrown when you create 38 | * the context. 39 | */ 40 | ttlKeyPath?: KeyPath; 41 | 42 | /** 43 | * An optional function that is used to 44 | * generate IDs when the id property 45 | * is blank 46 | */ 47 | idGenerator?: () => string; 48 | } 49 | 50 | /** 51 | * A root collection (or top-level collection) is 52 | * one without a parent. It is stored in such a 53 | * way to enable the individual retrieval of items 54 | * or bulk retrieval of child items by foreign 55 | * key. 56 | */ 57 | export interface RootCollection extends CommonCollection { 58 | /** 59 | * A fixed value that distinguishes this as a 60 | * parent or 'root' collection (It is optional to 61 | * specify this) 62 | */ 63 | type?: 'root'; 64 | } 65 | 66 | /** 67 | * A child collection lets you store values related 68 | * to a parent collection (defined by [[RootCollection]]) 69 | * 70 | * It's used to add data to an entity that is not always 71 | * needed for every access pattern, and can only be referenced 72 | * with regards to its parent.
73 | */ 74 | export interface ChildCollection extends CommonCollection { 75 | /** A fixed value that must be defined to distinguish 76 | * this collection as a child collection 77 | */ 78 | type: 'child'; 79 | 80 | /** 81 | * The name of the parent collection. It must be added 82 | * to the same context as this child collection. 83 | */ 84 | parentCollectionName: string; 85 | 86 | /** 87 | * The foreign key path 88 | */ 89 | foreignKeyPath: KeyPath; 90 | } 91 | 92 | /** 93 | * A collection is a division based on entity type 94 | * for storage in a DynamoDB table. It tells dynaglue 95 | * how to store your documents and how to index them 96 | * for this entity type. 97 | * 98 | * Defining multiple 99 | * collections on the same DynamoDB table layout 100 | * allows you to store multiple data types in the 101 | * same DynamoDB table (i.e. a single table design). 102 | * 103 | * *Use the [[RootCollection]] or [[ChildCollection]] for specifying 104 | * a collection* 105 | */ 106 | export type Collection = RootCollection | ChildCollection; 107 | -------------------------------------------------------------------------------- /src/base/collection_definition.ts: -------------------------------------------------------------------------------- 1 | import { KeyPath, AccessPatternOptions } from './access_pattern'; 2 | import { Collection, ChildCollection, RootCollection } from './collection'; 3 | 4 | /** 5 | * 6 | * Extracted keys from access patterns. 
Used internally 7 | * to build stored values 8 | */ 9 | export type ExtractKey = { 10 | type: 'partition' | 'sort' | 'ttl'; 11 | key: string; 12 | valuePaths: KeyPath[]; 13 | options: AccessPatternOptions; 14 | requiredPaths?: KeyPath[]; 15 | }; 16 | 17 | /** 18 | * 19 | * Collection mapping (used internally) 20 | */ 21 | export type CollectionDefinition = Collection & { 22 | wrapperExtractKeys: ExtractKey[]; 23 | }; 24 | 25 | /** 26 | * Root collection definition (used internally) 27 | */ 28 | export type RootCollectionDefinition = RootCollection & { 29 | wrapperExtractKeys: ExtractKey[]; 30 | }; 31 | /** 32 | * Child collection definition (used internally) 33 | */ 34 | export type ChildCollectionDefinition = ChildCollection & { 35 | wrapperExtractKeys: ExtractKey[]; 36 | }; 37 | -------------------------------------------------------------------------------- /src/base/common.ts: -------------------------------------------------------------------------------- 1 | import { AttributeValue } from "@aws-sdk/client-dynamodb"; 2 | 3 | /** 4 | * A document stored in a collection. If an _id field 5 | * is not provided, one will be generated on insert. 6 | */ 7 | export type DocumentWithId = { 8 | _id: string; 9 | }; 10 | 11 | /** 12 | * A wrapped document, as it is written to DynamoDB. 13 | * You should not have to manipulate this type directly. 
14 | */ 15 | export type WrappedDocument = { 16 | type: string; 17 | value: DocumentType; 18 | }; 19 | 20 | /** 21 | * A DynamoDB primary key 22 | */ 23 | export type Key = { [key: string]: AttributeValue }; 24 | 25 | /** 26 | * The DynamoDB `Set` type 27 | */ 28 | export type DynamoDBSet = Set; 29 | -------------------------------------------------------------------------------- /src/base/conditions_parser.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | parseCompositeCondition, 3 | } from './conditions_parser'; 4 | import { InvalidCompositeConditionException } from './exceptions'; 5 | import { createNameMapper, createValueMapper } from './mappers'; 6 | import { BetweenCondition, CompositeCondition, KeyPathsAndClause } from './conditions'; 7 | import { ConditionParseContext } from './conditions_types'; 8 | 9 | describe('parseCompositeCondition', () => { 10 | const nc = (): ConditionParseContext => ({ 11 | nameMapper: createNameMapper(), 12 | valueMapper: createValueMapper(), 13 | parsePath: [], 14 | }); // eslint-ignore @typescript-eslint/explicit-function-return-type 15 | 16 | describe('operators', () => { 17 | it('should handle $eq', () => { 18 | const context = nc(); 19 | const expression = parseCompositeCondition( 20 | { 'x.y': { $eq: 4 } }, 21 | context 22 | ); 23 | expect(expression).toEqual('#value.x.y = :value0'); 24 | expect(context.valueMapper.get()).toEqual({ 25 | ':value0': { N: '4' }, 26 | }); 27 | expect(context.nameMapper.get()).toEqual({ 28 | '#value': 'value', 29 | }); 30 | }); 31 | 32 | it('should handle $neq', () => { 33 | const context = nc(); 34 | const expression = parseCompositeCondition( 35 | { 'x.y': { $neq: 4 } }, 36 | context 37 | ); 38 | expect(expression).toEqual('#value.x.y <> :value0'); 39 | }); 40 | 41 | it('should handle $lt', () => { 42 | const context = nc(); 43 | const expression = parseCompositeCondition( 44 | { 'x.y': { $lt: 4 } }, 45 | context 46 | ); 47 | 
expect(expression).toEqual('#value.x.y < :value0'); 48 | }); 49 | 50 | it('should handle $lte', () => { 51 | const context = nc(); 52 | const expression = parseCompositeCondition( 53 | { 'x.y': { $lte: 4 } }, 54 | context 55 | ); 56 | expect(expression).toEqual('#value.x.y <= :value0'); 57 | }); 58 | 59 | it('should handle $gt', () => { 60 | const context = nc(); 61 | const expression = parseCompositeCondition( 62 | { 'x.y': { $gt: 4 } }, 63 | context 64 | ); 65 | expect(expression).toEqual('#value.x.y > :value0'); 66 | }); 67 | 68 | it('should handle $gte', () => { 69 | const context = nc(); 70 | const expression = parseCompositeCondition( 71 | { 'x.y': { $gte: 4 } }, 72 | context 73 | ); 74 | expect(expression).toEqual('#value.x.y >= :value0'); 75 | }); 76 | 77 | it('should handle $between', () => { 78 | const context = nc(); 79 | const expression = parseCompositeCondition( 80 | { 'x.y': { $between: { $gte: 2, $lte: 10.2 } } }, 81 | context 82 | ); 83 | expect(expression).toEqual('#value.x.y BETWEEN :value0 AND :value1'); 84 | }); 85 | 86 | it('should throw on an invalid $between value', () => { 87 | const context = nc(); 88 | expect(() => 89 | parseCompositeCondition({ x: { $between: null as unknown as ({ $gte: unknown, $lte: unknown }) } }, context) 90 | ).toThrowError(InvalidCompositeConditionException); 91 | expect(() => 92 | parseCompositeCondition( 93 | { x: { $between: {} } as BetweenCondition }, 94 | context 95 | ) 96 | ).toThrowError(InvalidCompositeConditionException); 97 | expect(() => 98 | parseCompositeCondition( 99 | { x: { $between: { $gte: 1 } } as BetweenCondition }, 100 | context 101 | ) 102 | ).toThrowError(InvalidCompositeConditionException); 103 | expect(() => 104 | parseCompositeCondition( 105 | { x: { $between: { $lte: 1 } } as BetweenCondition }, 106 | context 107 | ) 108 | ).toThrowError(InvalidCompositeConditionException); 109 | }); 110 | 111 | it('should handle $in', () => { 112 | const context = nc(); 113 | const expression = 
parseCompositeCondition( 114 | { 'x.y': { $in: [4, 5, 6] } }, 115 | context 116 | ); 117 | expect(expression).toEqual('#value.x.y IN (:value0,:value1,:value2)'); 118 | }); 119 | 120 | it('should throw when a $in operator has 0 arguments', () => { 121 | const context = nc(); 122 | expect(() => 123 | parseCompositeCondition({ x: { $in: [] } }, context) 124 | ).toThrow(InvalidCompositeConditionException); 125 | }); 126 | 127 | it('should throw when a $in operator has more than 100 arguments', () => { 128 | const context = nc(); 129 | expect(() => 130 | parseCompositeCondition( 131 | { x: { $in: new Array(101).fill(null).map((_, index) => index) } }, 132 | context 133 | ) 134 | ).toThrow(InvalidCompositeConditionException); 135 | }); 136 | 137 | it('should on an invalid $in value', () => { 138 | expect(() => 139 | parseCompositeCondition({ x: { $in: null as unknown as unknown[] } }, nc()) 140 | ).toThrowError(InvalidCompositeConditionException); 141 | }); 142 | }); 143 | 144 | describe('functions', () => { 145 | it('should handle $exists true', () => { 146 | const context = nc(); 147 | const expression = parseCompositeCondition( 148 | { y: { $exists: true } }, 149 | context 150 | ); 151 | expect(expression).toEqual('attribute_exists(#value.y)'); 152 | }); 153 | 154 | it('should handle $exists false', () => { 155 | const context = nc(); 156 | const expression = parseCompositeCondition( 157 | { y: { $exists: false } }, 158 | context 159 | ); 160 | expect(expression).toEqual('attribute_not_exists(#value.y)'); 161 | }); 162 | 163 | it('should handle $contains', () => { 164 | const context = nc(); 165 | const expression = parseCompositeCondition( 166 | { $yes: { $contains: 'a string' } }, 167 | context 168 | ); 169 | expect(expression).toEqual('contains(#value.#attr0,:value0)'); 170 | }); 171 | 172 | it('should handle $beginsWith', () => { 173 | const context = nc(); 174 | const expression = parseCompositeCondition( 175 | { 'profile.user': { $beginsWith: 'example_1' } }, 
176 | context 177 | ); 178 | expect(expression).toEqual('begins_with(#value.profile.#attr0,:value0)'); 179 | }); 180 | 181 | it('should handle $type', () => { 182 | const context = nc(); 183 | const expression = parseCompositeCondition( 184 | { 'profile.user': { $type: 'S' } }, 185 | context 186 | ); 187 | expect(expression).toEqual( 188 | 'attribute_type(#value.profile.#attr0,:value0)' 189 | ); 190 | }); 191 | }); 192 | 193 | describe('key paths', () => { 194 | it('should AND multiple key paths in the same object', () => { 195 | const context = nc(); 196 | const expression = parseCompositeCondition( 197 | { 'x.y': { $neq: 4 }, myvalue: { $eq: 'a value' } }, 198 | context 199 | ); 200 | expect(expression).toEqual( 201 | '#value.x.y <> :value0 AND #value.myvalue = :value1' 202 | ); 203 | }); 204 | 205 | it('should throw an exception when one of the key paths is an operator', () => { 206 | const context = nc(); 207 | expect(() => 208 | parseCompositeCondition( 209 | { x: { $eq: 4 }, $or: [{ y: { $gt: 4 } }] }, 210 | context 211 | ) 212 | ).toThrow(InvalidCompositeConditionException); 213 | expect(() => 214 | parseCompositeCondition( 215 | { x: { $eq: 4 }, $gt: 4 } as unknown as KeyPathsAndClause, 216 | context 217 | ) 218 | ).toThrow(InvalidCompositeConditionException); 219 | }); 220 | }); 221 | 222 | it('should handle AND correctly', () => { 223 | const context = nc(); 224 | const expression: CompositeCondition = { 225 | $and: [ 226 | { x: { $eq: true }, y: { $gt: 7 } }, 227 | { username: { $beginsWith: 'example1' } }, 228 | ], 229 | }; 230 | expect(parseCompositeCondition(expression, context)).toEqual( 231 | '(#value.x = :value0 AND #value.y > :value1) AND (begins_with(#value.username,:value2))' 232 | ); 233 | }); 234 | 235 | it('should handle OR correctly', () => { 236 | const context = nc(); 237 | const expression: CompositeCondition = { 238 | $or: [ 239 | { username: { $beginsWith: 'example1' } }, 240 | { x: { $eq: true }, y: { $gt: 7 } }, 241 | ], 242 | }; 243 
| expect(parseCompositeCondition(expression, context)).toEqual( 244 | '(begins_with(#value.username,:value0)) OR (#value.x = :value1 AND #value.y > :value2)' 245 | ); 246 | }); 247 | 248 | it('should handle NOT correctly', () => { 249 | const context = nc(); 250 | const expression = { 251 | $not: { username: { $beginsWith: 'example1' } }, 252 | }; 253 | expect(parseCompositeCondition(expression, context)).toEqual( 254 | 'NOT begins_with(#value.username,:value0)' 255 | ); 256 | }); 257 | 258 | it('should handle combined OR and AND and NOT correctly', () => { 259 | const context = nc(); 260 | const expression: CompositeCondition = { 261 | $and: [ 262 | { $or: [{ 'point.x': { $lt: 0 } }, { 'point.y': { $lt: 0 } }] }, 263 | { $not: { range: { $gte: 1.0 } } }, 264 | ], 265 | }; 266 | expect(parseCompositeCondition(expression, context)).toBe( 267 | '((#value.point.x < :value0) OR (#value.point.y < :value1)) AND (NOT #value.#attr0 >= :value2)' 268 | ); 269 | }); 270 | }); 271 | -------------------------------------------------------------------------------- /src/base/conditions_parser.ts: -------------------------------------------------------------------------------- 1 | import { NameMapper } from './mappers'; 2 | import { 3 | CompositeCondition, 4 | ConditionValue, 5 | AndCondition, 6 | OrCondition, 7 | NotCondition, 8 | KeyPathsAndClause, 9 | } from './conditions'; 10 | import { InvalidCompositeConditionException } from './exceptions'; 11 | import { ParseElement, ConditionParseContext } from './conditions_types'; 12 | 13 | /** 14 | * @internal 15 | * Is one of the keys in the object a condition operator 16 | */ 17 | const isConditionKey = (key: string): boolean => 18 | [ 19 | '$or', 20 | '$and', 21 | '$eq', 22 | '$neq', 23 | '$gt', 24 | '$gte', 25 | '$lt', 26 | '$lte', 27 | '$between', 28 | '$in', 29 | '$exists', 30 | '$type', 31 | '$beginsWith', 32 | '$contains', 33 | ].includes(key); 34 | 35 | /** 36 | * @internal 37 | * Convert a string to a key path, adding the 
`value` object prefix. 38 | */ 39 | const mapKeyPath = (key: string, nameMapper: NameMapper): string => 40 | [ 41 | nameMapper.map('value', '#value'), 42 | ...key.split('.').map((pathElement) => nameMapper.map(pathElement)), 43 | ].join('.'); 44 | 45 | /** 46 | * @internal 47 | * 48 | * Parse an object expression and give back a filter/condition expression 49 | * for DynamoDB. 50 | * 51 | * @param clause the expression object 52 | * @param context a context - pass your current NameMapper and ValueMapper, which will be filled out with filter expression parts 53 | * @returns the condition/filter expression 54 | */ 55 | export const parseCompositeCondition = ( 56 | clause: CompositeCondition, 57 | context: ConditionParseContext 58 | ): string => { 59 | /* eslint-disable @typescript-eslint/no-use-before-define */ 60 | if (Object.keys(clause).length === 1) { 61 | if ('$and' in clause) { 62 | return parseAndCondition(clause as AndCondition, context); 63 | } else if ('$or' in clause) { 64 | return parseOrCondition(clause as OrCondition, context); 65 | } else if ('$not' in clause) { 66 | return parseNotCondition(clause as NotCondition, context); 67 | } 68 | } 69 | return parseKeyPathsAndClause(clause as KeyPathsAndClause, context); 70 | /* eslint-enable @typescript-eslint/no-use-before-define */ 71 | }; 72 | 73 | /** 74 | * @internal 75 | * Parse an AND condition expression 76 | */ 77 | const parseAndCondition = ( 78 | clause: AndCondition, 79 | context: ConditionParseContext 80 | ): string => { 81 | const parsePath: ParseElement[] = [ 82 | ...context.parsePath, 83 | { type: 'object', key: '$and' }, 84 | ]; 85 | const updatedContextForIndex = (index: number): ConditionParseContext => ({ 86 | ...context, 87 | parsePath: [...parsePath, { type: 'array', index }], 88 | }); 89 | const subclauses = clause.$and.map((clause, index) => 90 | parseCompositeCondition(clause, updatedContextForIndex(index)) 91 | ); 92 | 93 | return subclauses.map((clause) => `(${clause})`).join(' AND 
'); 94 | }; 95 | 96 | /** 97 | * @internal 98 | * Parse an OR condition expression 99 | */ 100 | const parseOrCondition = ( 101 | clause: OrCondition, 102 | context: ConditionParseContext 103 | ): string => { 104 | const parsePath: ParseElement[] = [ 105 | ...context.parsePath, 106 | { type: 'object', key: '$or' }, 107 | ]; 108 | const updatedContextForIndex = (index: number): ConditionParseContext => ({ 109 | ...context, 110 | parsePath: [...parsePath, { type: 'array', index }], 111 | }); 112 | const subclauses = clause.$or.map((clause, index) => 113 | parseCompositeCondition(clause, updatedContextForIndex(index)) 114 | ); 115 | 116 | return subclauses.map((clause) => `(${clause})`).join(' OR '); 117 | }; 118 | 119 | /** 120 | * @internal 121 | * Parse an NOT condition expression. 122 | */ 123 | const parseNotCondition = ( 124 | clause: NotCondition, 125 | context: ConditionParseContext 126 | ): string => { 127 | const parsePath: ParseElement[] = [ 128 | ...context.parsePath, 129 | { type: 'object', key: '$not' }, 130 | ]; 131 | const updatedContext = { ...context, parsePath }; 132 | const subclause = parseCompositeCondition(clause.$not, updatedContext); 133 | 134 | return `NOT ${subclause}`; 135 | }; 136 | 137 | /** 138 | * @internal 139 | * 140 | * Parse a key paths object (one that has key paths as keys and 141 | * operators as values. 
142 | */ 143 | const parseKeyPathsAndClause = ( 144 | clause: KeyPathsAndClause, 145 | context: ConditionParseContext 146 | ): string => { 147 | const { nameMapper, valueMapper, parsePath } = context; 148 | const paths = Object.keys(clause); 149 | if (paths.length < 1) { 150 | throw new InvalidCompositeConditionException( 151 | 'expected at least one key path with operator', 152 | parsePath 153 | ); 154 | } 155 | const conditionKey = paths.find(isConditionKey); 156 | if (conditionKey) { 157 | const keyParsePath: ParseElement[] = [ 158 | ...parsePath, 159 | { type: 'object', key: conditionKey }, 160 | ]; 161 | throw new InvalidCompositeConditionException( 162 | `unexpected condition key`, 163 | keyParsePath 164 | ); 165 | } 166 | const clauses: string[] = []; 167 | Object.entries(clause).forEach(([path, condition]) => { 168 | const keyParsePath: ParseElement[] = [ 169 | ...parsePath, 170 | { type: 'object', key: path }, 171 | ]; 172 | let clauseString; 173 | const simpleClause = (operator: string, value: ConditionValue): string => { 174 | const valueName = valueMapper.map(value); 175 | return `${mapKeyPath(path, nameMapper)} ${operator} ${valueName}`; 176 | }; 177 | if ('$eq' in condition) { 178 | const value = condition.$eq; 179 | clauseString = simpleClause('=', value); 180 | } else if ('$neq' in condition) { 181 | const value = condition.$neq; 182 | clauseString = simpleClause('<>', value); 183 | } else if ('$gt' in condition) { 184 | const value = condition.$gt; 185 | clauseString = simpleClause('>', value); 186 | } else if ('$gte' in condition) { 187 | const value = condition.$gte; 188 | clauseString = simpleClause('>=', value); 189 | } else if ('$lt' in condition) { 190 | const value = condition.$lt; 191 | clauseString = simpleClause('<', value); 192 | } else if ('$lte' in condition) { 193 | const value = condition.$lte; 194 | clauseString = simpleClause('<=', value); 195 | } else if ('$between' in condition) { 196 | const value = condition.$between; 197 | if 
( 198 | !value || 199 | typeof value !== 'object' || 200 | typeof value.$lte === 'undefined' || 201 | typeof value.$gte === 'undefined' 202 | ) { 203 | throw new InvalidCompositeConditionException( 204 | '$between must be an object with values for $lte and $gte', 205 | keyParsePath 206 | ); 207 | } 208 | const { $lte, $gte } = condition.$between; 209 | const value1 = valueMapper.map($gte); 210 | const value2 = valueMapper.map($lte); 211 | clauseString = `${mapKeyPath( 212 | path, 213 | nameMapper 214 | )} BETWEEN ${value1} AND ${value2}`; 215 | } else if ('$in' in condition) { 216 | if (!Array.isArray(condition.$in)) { 217 | throw new InvalidCompositeConditionException( 218 | '$in must be an array of values', 219 | keyParsePath 220 | ); 221 | } 222 | const values = condition.$in; 223 | if (values.length > 100) { 224 | throw new InvalidCompositeConditionException( 225 | '$in condition has too many values', 226 | keyParsePath 227 | ); 228 | } else if (values.length === 0) { 229 | throw new InvalidCompositeConditionException( 230 | '$in condition must have at least one value', 231 | keyParsePath 232 | ); 233 | } 234 | const valueNames = values.map((value) => valueMapper.map(value)); 235 | clauseString = `${mapKeyPath(path, nameMapper)} IN (${valueNames.join( 236 | ',' 237 | )})`; 238 | } else if ('$exists' in condition) { 239 | const fn = condition.$exists 240 | ? 
'attribute_exists' 241 | : 'attribute_not_exists'; 242 | clauseString = `${fn}(${mapKeyPath(path, nameMapper)})`; 243 | } else if ('$type' in condition) { 244 | const valueName = valueMapper.map(condition.$type); 245 | clauseString = `attribute_type(${mapKeyPath( 246 | path, 247 | nameMapper 248 | )},${valueName})`; 249 | } else if ('$beginsWith' in condition) { 250 | const valueName = valueMapper.map(condition.$beginsWith); 251 | clauseString = `begins_with(${mapKeyPath( 252 | path, 253 | nameMapper 254 | )},${valueName})`; 255 | } else if ('$contains' in condition) { 256 | const valueName = valueMapper.map(condition.$contains); 257 | clauseString = `contains(${mapKeyPath(path, nameMapper)},${valueName})`; 258 | } else 259 | throw new InvalidCompositeConditionException( 260 | 'unknown operator', 261 | keyParsePath 262 | ); 263 | 264 | clauses.push(clauseString); 265 | }); 266 | 267 | return clauses.join(' AND '); 268 | }; 269 | -------------------------------------------------------------------------------- /src/base/conditions_types.ts: -------------------------------------------------------------------------------- 1 | import { NameMapper, ValueMapper } from './mappers'; 2 | 3 | /** 4 | * An element in the expression parse tree, used 5 | * to assist error messaging. 6 | */ 7 | export type ParseElement = 8 | | { type: 'array'; index: number } 9 | | { type: 'object'; key: string }; 10 | 11 | /** 12 | * @internal 13 | * 14 | * A context object passed between 15 | * parse functions to track value and name 16 | * mapping and current parse context. 
17 | */ 18 | export type ConditionParseContext = { 19 | nameMapper: NameMapper; 20 | valueMapper: ValueMapper; 21 | parsePath: ParseElement[]; 22 | }; 23 | -------------------------------------------------------------------------------- /src/base/exceptions.ts: -------------------------------------------------------------------------------- 1 | import { VError, Options as VErrorOptions } from 'verror'; 2 | import { ParseElement } from './conditions_types'; 3 | 4 | /** 5 | * Thrown when insert() is called with a specified _id 6 | * that already exists 7 | */ 8 | export class ConflictException extends VError { 9 | constructor(message: string, id: string) { 10 | super({ info: { id }, name: 'conflict.error' }, message); 11 | } 12 | } 13 | 14 | /** 15 | * Thrown when a replace or delete request(s) are rejected as part of transaction 16 | * that already exists 17 | */ 18 | export class TransactionConflictException extends VError { 19 | constructor(message: string) { 20 | super({ name: 'transaction_conflict.error' }, message); 21 | } 22 | } 23 | 24 | /** 25 | * Thrown when an `_id` value is specified that is not 26 | * valid (_id values must be a string). 27 | */ 28 | export class InvalidIdException extends VError { 29 | constructor(id: unknown) { 30 | super( 31 | { info: { id }, name: 'invalid_id.error' }, 32 | 'The provided document has an invalid ID' 33 | ); 34 | } 35 | } 36 | 37 | /** 38 | * Thrown when the child object to be inserted has a missing or invalid 39 | * parent key value. 
40 | */ 41 | export class InvalidParentIdException extends VError { 42 | constructor( 43 | parentId: unknown, 44 | collectionName: string, 45 | parentCollectionName: string 46 | ) { 47 | super( 48 | { 49 | info: { 50 | parentId, 51 | collectionName, 52 | parentCollectionName, 53 | }, 54 | name: 'invalid_parent_id.error', 55 | }, 56 | `The provided document has a missing parent ID or it is the incorrect type` 57 | ); 58 | } 59 | } 60 | 61 | /** 62 | * Thrown when the collection specified cannot be found in the context, or 63 | * isn't of the expected type (root or child) for the called API. 64 | */ 65 | export class CollectionNotFoundException extends VError { 66 | constructor(collection: string) { 67 | super( 68 | { info: { collection }, name: 'collection_not_found.error' }, 69 | `Collection not found: '${collection}'` 70 | ); 71 | } 72 | } 73 | 74 | /** 75 | * Thrown when the index is not found for the provided query. 76 | */ 77 | export class IndexNotFoundException extends VError { 78 | constructor(index: string) { 79 | super( 80 | { info: { index }, name: 'index_not_found.error' }, 81 | `Index not found: ${index}` 82 | ); 83 | } 84 | } 85 | 86 | /** 87 | * Thrown during context construction to indicate a configuration 88 | * issue in the specified collections, access patterns or layouts. 89 | */ 90 | export class ConfigurationException extends VError { 91 | constructor(message: string, options: VErrorOptions = {}) { 92 | super( 93 | { 94 | ...options, 95 | name: 'configuration.error', 96 | }, 97 | message 98 | ); 99 | } 100 | } 101 | 102 | /** 103 | * Thrown on invalid field values that are provided during 104 | * persistence (insert/update/replace). 
105 | */ 106 | export class InvalidIndexedFieldValueException extends VError { 107 | constructor( 108 | message: string, 109 | { collection, keyPath }: { collection: string; keyPath: string[] } 110 | ) { 111 | super( 112 | { 113 | info: { 114 | collection, 115 | keyPath, 116 | }, 117 | name: 'invalid_indexed_field_value.error', 118 | }, 119 | message 120 | ); 121 | } 122 | } 123 | 124 | /** 125 | * Thrown on an invalidly specified query provided to a 126 | * `find()` operation. 127 | */ 128 | export class InvalidQueryException extends VError { 129 | constructor( 130 | message: string, 131 | { 132 | collection, 133 | query, 134 | }: { collection: string; query: Record } 135 | ) { 136 | super( 137 | { 138 | info: { 139 | collection, 140 | query, 141 | }, 142 | name: 'invalid_query.error', 143 | }, 144 | message 145 | ); 146 | } 147 | } 148 | 149 | /** 150 | * Thrown when the updates provided to an `update()` operation 151 | * are invalid. 152 | */ 153 | export class InvalidUpdatesException extends VError { 154 | constructor(message: string) { 155 | super( 156 | { 157 | name: 'invalid_updates.error', 158 | }, 159 | message 160 | ); 161 | } 162 | } 163 | 164 | /** 165 | * Thrown when the update values provided in an `update()` operation 166 | * are invalid (e.g. `undefined`) 167 | */ 168 | export class InvalidUpdateValueException extends VError { 169 | constructor(path: string, message: string) { 170 | super( 171 | { 172 | name: 'invalid_update_value', 173 | info: { 174 | path, 175 | }, 176 | }, 177 | message 178 | ); 179 | } 180 | } 181 | 182 | /** 183 | * Throw when validations fails for arguments passed for any operation 184 | */ 185 | export class InvalidArgumentException extends VError { 186 | constructor(message: string) { 187 | super( 188 | { 189 | name: 'invalid_arguments', 190 | }, 191 | message 192 | ); 193 | } 194 | } 195 | 196 | /** 197 | * Throw when the find descriptor for a transactFindByIds or 198 | * batchFindByIds is invalid. 
199 | */ 200 | export class InvalidFindDescriptorException extends VError { 201 | constructor(message: string) { 202 | super( 203 | { 204 | name: 'invalid_find_descriptor', 205 | }, 206 | message 207 | ); 208 | } 209 | } 210 | 211 | /** 212 | * Thrown when something from DynamoDB doesn't match 213 | * the context configuration during response processing 214 | */ 215 | export class InternalProcessingException extends VError { 216 | constructor(message: string) { 217 | super({ name: 'internal_processing' }, message); 218 | } 219 | } 220 | 221 | /** 222 | * @internal 223 | * 224 | * Print out a condition parser path for debugging / error handling 225 | * */ 226 | const printParsePath = (parsePath: ParseElement[]): string => { 227 | return parsePath 228 | .map((e) => { 229 | if (e.type === 'array') return e.index === 0 ? `[` : `[...@${e.index}:`; 230 | return `{ ${e.key}: `; 231 | }) 232 | .join(''); 233 | }; 234 | 235 | /** 236 | * Thrown when there is a problem with the expression given 237 | * as a `FilterExpression` or `ConditionExpression`. 
238 | */ 239 | export class InvalidCompositeConditionException extends VError { 240 | constructor(message: string, parsePath: ParseElement[]) { 241 | super( 242 | { 243 | name: 'invalid_composite_condition', 244 | info: { parsePath }, 245 | }, 246 | `Condition parse exception: ${message} at ${printParsePath(parsePath)}` 247 | ); 248 | } 249 | } 250 | 251 | /** 252 | * An exception thrown when a bad set of 253 | * [[BatchReplaceDeleteDescriptor]] is given to 254 | * [[batchReplaceDelete]] 255 | */ 256 | export class InvalidBatchReplaceDeleteDescriptorException extends VError { 257 | constructor(message: string, info?: Record) { 258 | super({ name: 'invalid_batch_replace_descriptor', info }, message); 259 | } 260 | } 261 | 262 | /** 263 | * When a TransactGetItems request conflicts with an 264 | * ongoing TransactWriteItems operation on one or 265 | * more items in the TransactGetItems request 266 | */ 267 | export class TransactionCanceledException extends VError { 268 | constructor(message: string, info?: Record) { 269 | super({ name: 'transaction_cancelled', info }, message); 270 | } 271 | } 272 | 273 | export class InvalidRangeOperatorException extends VError { 274 | constructor(message: string, operator: string) { 275 | super({ name: 'invalid_range_operator', info: { operator } }, message); 276 | } 277 | } 278 | 279 | export class IndexAccessPatternTypeException extends VError { 280 | constructor(message: string) { 281 | super({ name: 'index_access_pattern_type' }, message); 282 | } 283 | } 284 | 285 | /** 286 | * DynamoDB rejected the request because you retried a request with 287 | * a different payload but with an idempotent token that was already used. 
288 | */ 289 | export class IdempotentParameterMismatchException extends VError { 290 | constructor(message: string, info?: Record) { 291 | super({ name: 'idempotent_parameter_mismatch', info }, message); 292 | } 293 | } 294 | 295 | /** 296 | * Transaction request cannot include multiple operations on one item 297 | */ 298 | export class TransactionValidationException extends VError { 299 | constructor(message: string) { 300 | super({ name: 'transaction_validation' }, message); 301 | } 302 | } 303 | /** 304 | * The transaction with the given request token is already in progress 305 | */ 306 | export class TransactionInProgressException extends VError { 307 | constructor(message: string, info?: Record) { 308 | super({ name: 'transaction_in_progress', info }, message); 309 | } 310 | } 311 | -------------------------------------------------------------------------------- /src/base/expression_util.test.ts: -------------------------------------------------------------------------------- 1 | import { isSafeAttributeName } from './expression_util'; 2 | 3 | describe('isSafeAttributeName', () => { 4 | it('should return false on reserved words of any case', () => { 5 | expect(isSafeAttributeName('agent')).toBe(false); 6 | expect(isSafeAttributeName('CLUSTERING')).toBe(false); 7 | expect(isSafeAttributeName('Comment')).toBe(false); 8 | }); 9 | 10 | it('should return false on attribute names that are not safe to use unescaped', () => { 11 | expect(isSafeAttributeName('not safe')).toBe(false); 12 | expect(isSafeAttributeName('a.dotted.Value')).toBe(false); 13 | expect(isSafeAttributeName('3value')).toBe(false); 14 | }); 15 | 16 | it('should return true on safe attribute names', () => { 17 | expect(isSafeAttributeName('animalType')).toBe(true); 18 | expect(isSafeAttributeName('usertype1')).toBe(true); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /src/base/expression_util.ts: 
-------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | // prettier-ignore 3 | /** @internal */ 4 | const DYNAMODB_RESERVED_WORDS = [ 'ABORT', 'ABSOLUTE', 'ACTION', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARCHIVE', 'ARE', 'ARRAY', 'AS', 'ASC', 'ASCII', 'ASENSITIVE', 'ASSERTION', 'ASYMMETRIC', 'AT', 'ATOMIC', 'ATTACH', 'ATTRIBUTE', 'AUTH', 'AUTHORIZATION', 'AUTHORIZE', 'AUTO', 'AVG', 'BACK', 'BACKUP', 'BASE', 'BATCH', 'BEFORE', 'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BLOCK', 'BOOLEAN', 'BOTH', 'BREADTH', 'BUCKET', 'BULK', 'BY', 'BYTE', 'CALL', 'CALLED', 'CALLING', 'CAPACITY', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CHAR', 'CHARACTER', 'CHECK', 'CLASS', 'CLOB', 'CLOSE', 'CLUSTER', 'CLUSTERED', 'CLUSTERING', 'CLUSTERS', 'COALESCE', 'COLLATE', 'COLLATION', 'COLLECTION', 'COLUMN', 'COLUMNS', 'COMBINE', 'COMMENT', 'COMMIT', 'COMPACT', 'COMPILE', 'COMPRESS', 'CONDITION', 'CONFLICT', 'CONNECT', 'CONNECTION', 'CONSISTENCY', 'CONSISTENT', 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONSUMED', 'CONTINUE', 'CONVERT', 'COPY', 'CORRESPONDING', 'COUNT', 'COUNTER', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DATETIME', 'DAY', 'DEALLOCATE', 'DEC', 'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFERRABLE', 'DEFERRED', 'DEFINE', 'DEFINED', 'DEFINITION', 'DELETE', 'DELIMITED', 'DEPTH', 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DETACH', 'DETERMINISTIC', 'DIAGNOSTICS', 'DIRECTORIES', 'DISABLE', 'DISCONNECT', 'DISTINCT', 'DISTRIBUTE', 'DO', 'DOMAIN', 'DOUBLE', 'DROP', 'DUMP', 'DURATION', 'DYNAMIC', 'EACH', 'ELEMENT', 'ELSE', 'ELSEIF', 'EMPTY', 'ENABLE', 'END', 'EQUAL', 'EQUALS', 'ERROR', 'ESCAPE', 'ESCAPED', 'EVAL', 'EVALUATE', 'EXCEEDED', 'EXCEPT', 'EXCEPTION', 'EXCEPTIONS', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXPLODE', 'EXPORT', 'EXPRESSION', 'EXTENDED', 'EXTERNAL', 'EXTRACT', 'FAIL', 'FALSE', 
'FAMILY', 'FETCH', 'FIELDS', 'FILE', 'FILTER', 'FILTERING', 'FINAL', 'FINISH', 'FIRST', 'FIXED', 'FLATTERN', 'FLOAT', 'FOR', 'FORCE', 'FOREIGN', 'FORMAT', 'FORWARD', 'FOUND', 'FREE', 'FROM', 'FULL', 'FUNCTION', 'FUNCTIONS', 'GENERAL', 'GENERATE', 'GET', 'GLOB', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GREATER', 'GROUP', 'GROUPING', 'HANDLER', 'HASH', 'HAVE', 'HAVING', 'HEAP', 'HIDDEN', 'HOLD', 'HOUR', 'IDENTIFIED', 'IDENTITY', 'IF', 'IGNORE', 'IMMEDIATE', 'IMPORT', 'IN', 'INCLUDING', 'INCLUSIVE', 'INCREMENT', 'INCREMENTAL', 'INDEX', 'INDEXED', 'INDEXES', 'INDICATOR', 'INFINITE', 'INITIALLY', 'INLINE', 'INNER', 'INNTER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITEM', 'ITEMS', 'ITERATE', 'JOIN', 'KEY', 'KEYS', 'LAG', 'LANGUAGE', 'LARGE', 'LAST', 'LATERAL', 'LEAD', 'LEADING', 'LEAVE', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', 'LIMITED', 'LINES', 'LIST', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', 'LOCATOR', 'LOCK', 'LOCKS', 'LOG', 'LOGED', 'LONG', 'LOOP', 'LOWER', 'MAP', 'MATCH', 'MATERIALIZED', 'MAX', 'MAXLEN', 'MEMBER', 'MERGE', 'METHOD', 'METRICS', 'MIN', 'MINUS', 'MINUTE', 'MISSING', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MODULE', 'MONTH', 'MULTI', 'MULTISET', 'NAME', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB', 'NEW', 'NEXT', 'NO', 'NONE', 'NOT', 'NULL', 'NULLIF', 'NUMBER', 'NUMERIC', 'OBJECT', 'OF', 'OFFLINE', 'OFFSET', 'OLD', 'ON', 'ONLINE', 'ONLY', 'OPAQUE', 'OPEN', 'OPERATOR', 'OPTION', 'OR', 'ORDER', 'ORDINALITY', 'OTHER', 'OTHERS', 'OUT', 'OUTER', 'OUTPUT', 'OVER', 'OVERLAPS', 'OVERRIDE', 'OWNER', 'PAD', 'PARALLEL', 'PARAMETER', 'PARAMETERS', 'PARTIAL', 'PARTITION', 'PARTITIONED', 'PARTITIONS', 'PATH', 'PERCENT', 'PERCENTILE', 'PERMISSION', 'PERMISSIONS', 'PIPE', 'PIPELINED', 'PLAN', 'POOL', 'POSITION', 'PRECISION', 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVATE', 'PRIVILEGES', 'PROCEDURE', 'PROCESSED', 'PROJECT', 'PROJECTION', 
'PROPERTY', 'PROVISIONING', 'PUBLIC', 'PUT', 'QUERY', 'QUIT', 'QUORUM', 'RAISE', 'RANDOM', 'RANGE', 'RANK', 'RAW', 'READ', 'READS', 'REAL', 'REBUILD', 'RECORD', 'RECURSIVE', 'REDUCE', 'REF', 'REFERENCE', 'REFERENCES', 'REFERENCING', 'REGEXP', 'REGION', 'REINDEX', 'RELATIVE', 'RELEASE', 'REMAINDER', 'RENAME', 'REPEAT', 'REPLACE', 'REQUEST', 'RESET', 'RESIGNAL', 'RESOURCE', 'RESPONSE', 'RESTORE', 'RESTRICT', 'RESULT', 'RETURN', 'RETURNING', 'RETURNS', 'REVERSE', 'REVOKE', 'RIGHT', 'ROLE', 'ROLES', 'ROLLBACK', 'ROLLUP', 'ROUTINE', 'ROW', 'ROWS', 'RULE', 'RULES', 'SAMPLE', 'SATISFIES', 'SAVE', 'SAVEPOINT', 'SCAN', 'SCHEMA', 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECTION', 'SEGMENT', 'SEGMENTS', 'SELECT', 'SELF', 'SEMI', 'SENSITIVE', 'SEPARATE', 'SEQUENCE', 'SERIALIZABLE', 'SESSION', 'SET', 'SETS', 'SHARD', 'SHARE', 'SHARED', 'SHORT', 'SHOW', 'SIGNAL', 'SIMILAR', 'SIZE', 'SKEWED', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SOURCE', 'SPACE', 'SPACES', 'SPARSE', 'SPECIFIC', 'SPECIFICTYPE', 'SPLIT', 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNING', 'START', 'STATE', 'STATIC', 'STATUS', 'STORAGE', 'STORE', 'STORED', 'STREAM', 'STRING', 'STRUCT', 'STYLE', 'SUB', 'SUBMULTISET', 'SUBPARTITION', 'SUBSTRING', 'SUBTYPE', 'SUM', 'SUPER', 'SYMMETRIC', 'SYNONYM', 'SYSTEM', 'TABLE', 'TABLESAMPLE', 'TEMP', 'TEMPORARY', 'TERMINATED', 'TEXT', 'THAN', 'THEN', 'THROUGHPUT', 'TIME', 'TIMESTAMP', 'TIMEZONE', 'TINYINT', 'TO', 'TOKEN', 'TOTAL', 'TOUCH', 'TRAILING', 'TRANSACTION', 'TRANSFORM', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE', 'TRUNCATE', 'TTL', 'TUPLE', 'TYPE', 'UNDER', 'UNDO', 'UNION', 'UNIQUE', 'UNIT', 'UNKNOWN', 'UNLOGGED', 'UNNEST', 'UNPROCESSED', 'UNSIGNED', 'UNTIL', 'UPDATE', 'UPPER', 'URL', 'USAGE', 'USE', 'USER', 'USERS', 'USING', 'UUID', 'VACUUM', 'VALUE', 'VALUED', 'VALUES', 'VARCHAR', 'VARIABLE', 'VARIANCE', 'VARINT', 'VARYING', 'VIEW', 'VIEWS', 'VIRTUAL', 'VOID', 'WAIT', 'WHEN', 'WHENEVER', 'WHERE', 'WHILE', 'WINDOW', 'WITH', 'WITHIN', 
'WITHOUT', 'WORK', 'WRAPPED', 'WRITE', 'YEAR', 'ZONE' ]; 5 | /* eslint-enable */ 6 | 7 | /** 8 | * @internal 9 | */ 10 | export const isReservedWord = (word: string): boolean => 11 | DYNAMODB_RESERVED_WORDS.includes(word.toUpperCase()); 12 | 13 | /** 14 | * @internal 15 | */ 16 | export const isSafeAttributeName = (attributeName: string): boolean => { 17 | if (isReservedWord(attributeName)) { 18 | return false; 19 | } 20 | return /^[A-Za-z][A-Za-z0-9]*$/.test(attributeName); 21 | }; 22 | -------------------------------------------------------------------------------- /src/base/layout.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * The structure of your primary index. Your table 3 | * must have a partition and sort keys, both of 4 | * type 'S' (string) 5 | */ 6 | export type PrimaryIndexLayout = { 7 | /** 8 | * The name of the primary partition key 9 | */ 10 | partitionKey: string; 11 | /** 12 | * The name of the primary sort key 13 | */ 14 | sortKey: string; 15 | }; 16 | 17 | /** 18 | * The structure of a Global Secondary Index 19 | */ 20 | export type SecondaryIndexLayout = { 21 | /** 22 | * The name of the secondary index 23 | */ 24 | indexName: string; 25 | /** 26 | * The name of the secondary index partition key 27 | */ 28 | partitionKey: string; 29 | /** 30 | * The name of the secondary index sort key. If 31 | * your index does not have one, leave this as 32 | * `undefined` 33 | */ 34 | sortKey?: string; 35 | }; 36 | 37 | /** 38 | * Defines the structure of the underlying table of a collection, 39 | * such as its table name, primary keys, sort keys and TTL 40 | * attribute name. 
41 | * 42 | * You can also customise the index name separator 43 | */ 44 | export interface CollectionLayout { 45 | /** Name of the table */ 46 | tableName: string; 47 | /** Layout of the primary key */ 48 | primaryKey: PrimaryIndexLayout; 49 | /** 50 | * Optional secondary find keys for additional lookups 51 | */ 52 | findKeys?: SecondaryIndexLayout[]; 53 | 54 | /** 55 | * The name of the attribute configured as the 56 | * TimeToLiveSpecification attribute 57 | */ 58 | ttlAttribute?: string; 59 | 60 | /** 61 | * The separator to use in index keys (defaults to `|-|`) 62 | */ 63 | indexKeySeparator?: string; 64 | } 65 | -------------------------------------------------------------------------------- /src/base/lexo.test.ts: -------------------------------------------------------------------------------- 1 | import { decrementLast, incrementLast } from './lexo'; 2 | 3 | describe('Lexographical operations', () => { 4 | describe('incrementLast', () => { 5 | it('should increment the empty string correctly', () => 6 | expect(incrementLast('')).toBe(String.fromCodePoint(0))); 7 | it('should increment a random value correctly', () => 8 | expect(incrementLast('123')).toBe('124')); 9 | it('should increment on the last character being max correctly', () => 10 | expect(incrementLast('12\uFFFF')).toBe( 11 | '13\u0000' 12 | )); 13 | }); 14 | describe('decrementLast', () => { 15 | it('should decrement the empty string correctly', () => 16 | expect(decrementLast('')).toBe('')); 17 | it('should decrement a random value correctly', () => 18 | expect(decrementLast('123')).toBe('122')); 19 | it('should decrement on the last character being min correctly', () => 20 | expect(decrementLast('12\u0000')).toBe( 21 | '11\uFFFF' 22 | )); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /src/base/lexo.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @private 3 | * Increment the last 
character to one character lexigraphically higher. It 4 | * might not be a valid Unicode character, but that shouldn't matter as we're 5 | * only using them for range expressions. 6 | * 7 | * NOTE: I'm not sure if this is 100% correct as Unicode codepoints can go 8 | * up to 0x11000, but working on the boundary condition of 0x10FFF isn't 9 | * a valid codepoint so JavaScript falls back to code-unit behaviour. For 10 | * DynamoDB purposes, this may need to be done as a straight Buffer man- 11 | * ipulation. 12 | * 13 | * @param x the string to increment the last character of 14 | * @returns the transformed string 15 | */ 16 | export const incrementLast = (x: string): string => { 17 | if (x.length === 0) { 18 | return '\u0000'; 19 | } 20 | const prefix = x.slice(0, x.length - 1); 21 | const lastCodePoint = x[x.length - 1].codePointAt(0) ?? 0; 22 | if (lastCodePoint >= 0xffff ) { 23 | return incrementLast(prefix) + String.fromCodePoint(0); 24 | } else { 25 | return prefix + String.fromCodePoint(lastCodePoint + 1); 26 | } 27 | }; 28 | /** 29 | * @private 30 | * Decrement the last character to one character lexigraphically lower. It 31 | * might not be a valid Unicode character, but that shouldn't matter as we're 32 | * only using them for range expressions 33 | * 34 | * NOTE: I'm not sure if this is 100% correct as Unicode codepoints can go 35 | * up to 0x11000, but working on the boundary condition of 0x10FFF isn't 36 | * a valid codepoint, so JavaScript falls back to code-unit behaviour. For 37 | * DynamoDB purposes, this may need to be done as a straight Buffer man- 38 | * ipulation. 
39 | * 40 | * @param x the string to increment the last character of 41 | * @returns the transformed string 42 | */ 43 | export const decrementLast = (x: string): string => { 44 | if (x.length === 0) { 45 | return ''; // This isn't really right but there is nothing lower than the empty string 46 | } 47 | const prefix = x.slice(0, x.length - 1); 48 | const lastCodePoint = x[x.length - 1].codePointAt(0) ?? 0; 49 | if (lastCodePoint === 0) { 50 | return decrementLast(prefix) + String.fromCodePoint(0xFFFF); 51 | } else { 52 | return prefix + String.fromCodePoint(lastCodePoint - 1); 53 | } 54 | }; 55 | -------------------------------------------------------------------------------- /src/base/mappers.test.ts: -------------------------------------------------------------------------------- 1 | import { createNameMapper, createValueMapper } from './mappers'; 2 | 3 | describe('createNameMapper', () => { 4 | it('should return a mapper when instantiated', () => { 5 | expect(createNameMapper()).toHaveProperty('get'); 6 | expect(createNameMapper()).toHaveProperty('map'); 7 | }); 8 | 9 | it('should use the mappedName value when provided if not already mapped', () => { 10 | const mapper = createNameMapper(); 11 | expect(mapper.map('1unsafe', '#attrx')).toBe('#attrx'); 12 | expect(mapper.map('1unsafe')).toBe('#attrx'); 13 | expect(mapper.map('1unsafe', '#attry')).toBe('#attrx'); 14 | }); 15 | 16 | it('should collect and add mapping properly', () => { 17 | const mapper = createNameMapper(); 18 | 19 | // Remember that .map() calls are mutating the mapper internally 20 | // i.e. it has side-effects internally. 
21 | expect(mapper.map('safeattribute1')).toBe('safeattribute1'); 22 | expect(mapper.map('not.a.safe.one')).toBe('#attr0'); 23 | expect(mapper.map('attribute')).toBe('#attr1'); 24 | expect(mapper.map('1unsafe')).toBe('#attr2'); 25 | 26 | // repeating a request should just return the previous match 27 | expect(mapper.map('attribute')).toBe('#attr1'); 28 | 29 | expect(mapper.get()).toEqual({ 30 | '#attr0': 'not.a.safe.one', 31 | '#attr1': 'attribute', 32 | '#attr2': '1unsafe', 33 | }); 34 | }); 35 | }); 36 | 37 | describe('createValueMapper', () => { 38 | it('should return a mapper when instantiated', () => { 39 | expect(createValueMapper()).toHaveProperty('get'); 40 | expect(createValueMapper()).toHaveProperty('map'); 41 | }); 42 | 43 | it('should collect and add mapping properly', () => { 44 | const mapper = createValueMapper(); 45 | 46 | // Remember that .map() calls are mutating the mapper internally 47 | // i.e. it has side-effects internally. 48 | expect(mapper.map('a string value')).toBe(':value0'); 49 | expect(mapper.map(1234)).toBe(':value1'); 50 | expect( 51 | mapper.map([ 52 | { name: '1', value: 1 }, 53 | { name: '2', value: null }, 54 | ]) 55 | ).toBe(':value2'); 56 | expect(mapper.map({ anObject: true })).toBe(':value3'); 57 | 58 | // repeated values are mapped again 59 | expect(mapper.map(1234)).toBe(':value4'); 60 | 61 | expect(mapper.get()).toEqual({ 62 | // values appear in DynamoDB format already 63 | ':value0': { S: 'a string value' }, 64 | ':value1': { N: '1234' }, 65 | ':value2': { 66 | L: [ 67 | { M: { name: { S: '1' }, value: { N: '1' } } }, 68 | { M: { name: { S: '2' }, value: { NULL: true } } }, 69 | ], 70 | }, 71 | ':value3': { 72 | M: { 73 | anObject: { BOOL: true }, 74 | }, 75 | }, 76 | ':value4': { N: '1234' }, 77 | }); 78 | }); 79 | }); 80 | -------------------------------------------------------------------------------- /src/base/mappers.ts: -------------------------------------------------------------------------------- 1 | import { 
convertToAttr } from '@aws-sdk/util-dynamodb'; 2 | import type { AttributeValue } from '@aws-sdk/client-dynamodb'; 3 | import { isSafeAttributeName } from './expression_util'; 4 | import { invertMap } from './util'; 5 | 6 | /** 7 | * @internal 8 | */ 9 | export type NameMapper = { 10 | map(name: string, mappedName?: string): string; 11 | get(): { [mappedName: string]: string } | undefined; 12 | }; 13 | 14 | /** 15 | * @internal 16 | * 17 | * Create a mapper for generating `ExpressionAttributeNames` 18 | * entries. [[map]] will generate a new attribute name 19 | * that can be used in expressions for every attribute it 20 | * is given. 21 | * 22 | * The value for `ExpressionAttributeNames` can be 23 | * returned by [[get]] at the end. 24 | */ 25 | export const createNameMapper = (): NameMapper => { 26 | let currentIndex = 0; 27 | const attributeNameMap = new Map(); 28 | 29 | return { 30 | /** 31 | * Generate an expression attribute name for 32 | * `name` (if necessary - values not requiring 33 | * escaping will be returned as-is) 34 | */ 35 | map(name: string, mappedName?: string): string { 36 | if (!mappedName && isSafeAttributeName(name)) { 37 | return name; 38 | } 39 | let nameMapping = attributeNameMap.get(name); 40 | if (!nameMapping) { 41 | nameMapping = mappedName ?? `#attr${currentIndex++}`; 42 | attributeNameMap.set(name, nameMapping); 43 | } 44 | return nameMapping; 45 | }, 46 | 47 | /** 48 | * Return the map of attribute names 49 | */ 50 | get(): Record | undefined { 51 | const result = invertMap(attributeNameMap); 52 | if (Object.keys(result).length === 0) return undefined; 53 | return result; 54 | }, 55 | }; 56 | }; 57 | 58 | /** 59 | * @internal 60 | */ 61 | export type ValueMapper = { 62 | map(value: unknown): string; 63 | get(): { [mappedName: string]: AttributeValue } | undefined; 64 | }; 65 | 66 | /** 67 | * @internal 68 | * 69 | * Create a mapper for generating `ExpressionAttributeValues` 70 | * entries. 
[[map]] will generate a new attribute name 71 | * that can be used in expressions for every attribute it 72 | * is given. 73 | * 74 | * The value for `ExpressionAttributeValues` can be 75 | * returned by [[get]] at the end. 76 | */ 77 | export const createValueMapper = (): ValueMapper => { 78 | let currentIndex = 0; 79 | const valueMap = new Map(); 80 | 81 | return { 82 | /** 83 | * Given `value`, marshall it to DynamoDB format, store 84 | * it internally, and return the `:value` reference that 85 | * can be used in expressions 86 | */ 87 | map(value: unknown): string { 88 | const valueKey = `:value${currentIndex++}`; 89 | const convertedValue = convertToAttr(value, { 90 | convertEmptyValues: false, 91 | removeUndefinedValues: true, 92 | }); 93 | valueMap.set(valueKey, convertedValue); 94 | return valueKey; 95 | }, 96 | 97 | /** 98 | * Get the map for `ExpressionAttributeValues` 99 | */ 100 | get(): { [key: string]: AttributeValue } | undefined { 101 | if (valueMap.size === 0) return undefined; 102 | return Array.from(valueMap).reduce((obj, [key, value]) => { 103 | obj[key] = value; 104 | return obj; 105 | }, {} as { [key: string]: AttributeValue }); 106 | }, 107 | }; 108 | }; 109 | -------------------------------------------------------------------------------- /src/base/new_id.ts: -------------------------------------------------------------------------------- 1 | // Code adapted from https://github.com/mongodb/js-bson/blob/master/lib/objectid.js 2 | // License: Apache License 2.0 3 | 4 | import { randomBytes } from 'crypto'; 5 | 6 | /** @internal */ 7 | const PROCESS_UNIQUE = randomBytes(5); 8 | /** @internal */ 9 | let IdIndex = ~~(Math.random() * 0xffffff); 10 | /** @internal */ 11 | const getInc = (): number => (IdIndex = (IdIndex + 1) % 0xffffff); 12 | 13 | /** 14 | * @internal 15 | * Generate a new ID. Uses the Mongo BSON ID generation algorithm. 
16 | */ 17 | const newId = (): string => { 18 | const time = ~~(Date.now() / 1000); 19 | 20 | const inc = getInc(); 21 | const buffer = Buffer.alloc(12); 22 | 23 | // 4-byte timestamp 24 | buffer[3] = time & 0xff; 25 | buffer[2] = (time >> 8) & 0xff; 26 | buffer[1] = (time >> 16) & 0xff; 27 | buffer[0] = (time >> 24) & 0xff; 28 | 29 | // 5-byte process unique 30 | buffer[4] = PROCESS_UNIQUE[0]; 31 | buffer[5] = PROCESS_UNIQUE[1]; 32 | buffer[6] = PROCESS_UNIQUE[2]; 33 | buffer[7] = PROCESS_UNIQUE[3]; 34 | buffer[8] = PROCESS_UNIQUE[4]; 35 | 36 | // 3-byte counter 37 | buffer[11] = inc & 0xff; 38 | buffer[10] = (inc >> 8) & 0xff; 39 | buffer[9] = (inc >> 16) & 0xff; 40 | 41 | return buffer.toString('hex'); 42 | }; 43 | 44 | export default newId; 45 | -------------------------------------------------------------------------------- /src/context/context_types.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { 3 | CollectionDefinition, 4 | ChildCollectionDefinition, 5 | RootCollectionDefinition, 6 | } from '../base/collection_definition'; 7 | 8 | /** 9 | * The internal representation of the context object. 
10 | * 11 | * **This type should be considered opaque and subject to change.** 12 | */ 13 | export interface DynaglueContext { 14 | ddb: DynamoDBClient; 15 | definitions: Map; 16 | rootDefinitions: Map; 17 | childDefinitions: Map; 18 | } 19 | -------------------------------------------------------------------------------- /src/context/extract_keys.test.ts: -------------------------------------------------------------------------------- 1 | import { CollectionLayout } from '../base/layout'; 2 | import { buildAndValidateAccessPatterns } from './extract_keys'; 3 | import { Collection } from '../base/collection'; 4 | 5 | describe('buildAndValidateAccessPatterns', () => { 6 | const partitionKeyName = 'identifier'; 7 | const sortKeyName = 'collectionReference'; 8 | const basicLayout: CollectionLayout = { 9 | tableName: 'table1', 10 | primaryKey: { partitionKey: partitionKeyName, sortKey: sortKeyName }, 11 | }; 12 | 13 | const multiIndexLayout: CollectionLayout = { 14 | ...basicLayout, 15 | findKeys: [ 16 | { indexName: 'index1', partitionKey: 'partkey1' }, 17 | { indexName: 'index2', partitionKey: 'partkey2', sortKey: 'sortkey2' }, 18 | { indexName: 'index3', partitionKey: 'partkey3', sortKey: 'sortkey3' }, 19 | ], 20 | }; 21 | 22 | test('produces correct results when there is no access patterns', () => { 23 | const collection = { 24 | name: 'users', 25 | layout: basicLayout, 26 | }; 27 | expect(buildAndValidateAccessPatterns(collection)).toEqual([]); 28 | }); 29 | 30 | test('throws when an index has already been used', () => { 31 | const collection: Collection = { 32 | name: 'staff', 33 | layout: multiIndexLayout, 34 | accessPatterns: [ 35 | { indexName: 'index1', partitionKeys: [['email']] }, 36 | { indexName: 'index1', partitionKeys: [['department']] }, 37 | ], 38 | }; 39 | expect(() => buildAndValidateAccessPatterns(collection)).toThrowError( 40 | /refers to index in use by another pattern/ 41 | ); 42 | }); 43 | 44 | test('throws when an non-existent index is 
referenced', () => { 45 | const collection: Collection = { 46 | name: 'staff', 47 | layout: multiIndexLayout, 48 | accessPatterns: [ 49 | { indexName: 'index1', partitionKeys: [['email']] }, 50 | { indexName: 'index50', partitionKeys: [['department']] }, 51 | ], 52 | }; 53 | expect(() => buildAndValidateAccessPatterns(collection)).toThrowError( 54 | /refers to index missing from layout/ 55 | ); 56 | }); 57 | 58 | test('throws when an access pattern defines sort keys but the referenced index does not', () => { 59 | const collection: Collection = { 60 | name: 'staff', 61 | layout: multiIndexLayout, 62 | accessPatterns: [ 63 | { 64 | indexName: 'index1', 65 | partitionKeys: [['department']], 66 | sortKeys: [['discipline']], 67 | }, 68 | { indexName: 'index2', partitionKeys: [['email']] }, 69 | ], 70 | }; 71 | expect(() => buildAndValidateAccessPatterns(collection)).toThrowError( 72 | /has sort keys but index .+ does not/ 73 | ); 74 | }); 75 | 76 | test('throws when an access pattern has no sort keys but the referenced index defines a sort key', () => { 77 | const collection: Collection = { 78 | name: 'staff', 79 | layout: multiIndexLayout, 80 | accessPatterns: [{ indexName: 'index2', partitionKeys: [['email']] }], 81 | }; 82 | expect(() => buildAndValidateAccessPatterns(collection)).toThrowError( 83 | /access pattern .+ does not have sort keys but index/ 84 | ); 85 | }); 86 | 87 | test('builds extract keys properly from a correcty defined set of access patterns', () => { 88 | const collection: Collection = { 89 | name: 'staff', 90 | layout: multiIndexLayout, 91 | accessPatterns: [ 92 | { indexName: 'index1', partitionKeys: [['email']] }, 93 | { 94 | indexName: 'index2', 95 | partitionKeys: [['department']], 96 | sortKeys: [['discipline']], 97 | }, 98 | ], 99 | }; 100 | 101 | expect(buildAndValidateAccessPatterns(collection)).toEqual([ 102 | { 103 | type: 'partition', 104 | key: 'partkey1', 105 | valuePaths: [['email']], 106 | options: {}, 107 | }, 108 | { 109 | type: 
'partition', 110 | key: 'partkey2', 111 | valuePaths: [['department']], 112 | options: {}, 113 | }, 114 | { 115 | type: 'sort', 116 | key: 'sortkey2', 117 | valuePaths: [['discipline']], 118 | options: {}, 119 | }, 120 | ]); 121 | }); 122 | 123 | test('builds extract keys properly from a correctly defined set of access patterns with required paths', () => { 124 | const collection: Collection = { 125 | name: 'staff', 126 | layout: multiIndexLayout, 127 | accessPatterns: [ 128 | { indexName: 'index1', partitionKeys: [['email']] }, 129 | { 130 | indexName: 'index2', 131 | partitionKeys: [['department']], 132 | sortKeys: [['discipline'], ['location']], 133 | requiredPaths: [['department'], ['discipline']], 134 | }, 135 | ], 136 | }; 137 | 138 | expect(buildAndValidateAccessPatterns(collection)).toEqual([ 139 | { 140 | type: 'partition', 141 | key: 'partkey1', 142 | valuePaths: [['email']], 143 | options: {}, 144 | }, 145 | { 146 | type: 'partition', 147 | key: 'partkey2', 148 | valuePaths: [['department']], 149 | options: {}, 150 | requiredPaths: [['department'], ['discipline']], 151 | }, 152 | { 153 | type: 'sort', 154 | key: 'sortkey2', 155 | valuePaths: [['discipline'], ['location']], 156 | options: {}, 157 | requiredPaths: [['department'], ['discipline']], 158 | }, 159 | ]); 160 | }); 161 | }); 162 | -------------------------------------------------------------------------------- /src/context/extract_keys.ts: -------------------------------------------------------------------------------- 1 | import { KeyPath, AccessPatternOptions } from '../base/access_pattern'; 2 | import { Collection } from '../base/collection'; 3 | import { describeAccessPattern } from '../base/access_pattern'; 4 | import { ConfigurationException } from '../base/exceptions'; 5 | import { ExtractKey } from '../base/collection_definition'; 6 | 7 | /** 8 | * @internal 9 | * Create the extract key value for setting up the context. 
An extract 10 | * key is an internal definition derived from access patterns to indicate 11 | * a key path that should be extracted for indexing upon insert. 12 | */ 13 | export const withTypeCreateExtractKey = 14 | (type: 'partition' | 'sort') => 15 | ( 16 | key: string, 17 | valuePaths: KeyPath[], 18 | options?: AccessPatternOptions, 19 | requiredPaths?: KeyPath[] 20 | ): ExtractKey => ({ 21 | type, 22 | key, 23 | valuePaths, 24 | options: options || {}, 25 | requiredPaths, 26 | }); 27 | 28 | /** @internal */ 29 | export const createPartitionExtractKey = withTypeCreateExtractKey('partition'); 30 | /** @internal */ 31 | export const createSortExtractKey = withTypeCreateExtractKey('sort'); 32 | 33 | /** @internal 34 | * 35 | * Construct the list of key paths to extract on `insert()` and `replace()` operations 36 | * for a collection, based on its access patterns. 37 | */ 38 | export function buildAndValidateAccessPatterns( 39 | collection: Collection 40 | ): ExtractKey[] { 41 | const wrapperExtractKeys: ExtractKey[] = []; 42 | const findKeys = collection.layout.findKeys || []; 43 | const usedIndexes: string[] = []; 44 | for (const accessPattern of collection.accessPatterns || []) { 45 | const { indexName } = accessPattern; 46 | if (usedIndexes.includes(indexName)) { 47 | throw new ConfigurationException( 48 | `accessPattern ${describeAccessPattern( 49 | accessPattern 50 | )} refers to index in use by another pattern` 51 | ); 52 | } 53 | usedIndexes.push(indexName); 54 | const layout = findKeys.find((key) => key.indexName === indexName); 55 | if (!layout) { 56 | throw new ConfigurationException( 57 | `access pattern ${describeAccessPattern( 58 | accessPattern 59 | )} refers to index missing from layout` 60 | ); 61 | } 62 | wrapperExtractKeys.push( 63 | createPartitionExtractKey( 64 | layout.partitionKey, 65 | accessPattern.partitionKeys, 66 | accessPattern.options, 67 | accessPattern.requiredPaths 68 | ) 69 | ); 70 | 71 | if (accessPattern.sortKeys) { 72 | if 
(!layout.sortKey) { 73 | throw new ConfigurationException( 74 | `access pattern ${describeAccessPattern( 75 | accessPattern 76 | )} has sort keys but index ${indexName} does not` 77 | ); 78 | } 79 | wrapperExtractKeys.push( 80 | createSortExtractKey( 81 | layout.sortKey, 82 | accessPattern.sortKeys, 83 | accessPattern.options, 84 | accessPattern.requiredPaths 85 | ) 86 | ); 87 | } else if (!accessPattern.sortKeys && layout.sortKey) { 88 | throw new ConfigurationException( 89 | `access pattern ${describeAccessPattern(accessPattern)} does not ` + 90 | `have sort keys but index ${indexName} has one defined - values in this collection will not show up` 91 | ); 92 | } 93 | } 94 | return wrapperExtractKeys; 95 | } 96 | -------------------------------------------------------------------------------- /src/context/index.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { Collection } from '../base/collection'; 3 | import { ConfigurationException } from '../base/exceptions'; 4 | import { validateFindKeys } from './validators'; 5 | import { DynaglueContext } from './context_types'; 6 | import { buildAndValidateAccessPatterns } from './extract_keys'; 7 | import { 8 | ExtractKey, 9 | RootCollectionDefinition, 10 | ChildCollectionDefinition, 11 | } from '../base/collection_definition'; 12 | import isEqual from 'lodash/isEqual'; 13 | import { describeKeyPath } from '../base/access_pattern'; 14 | 15 | /** 16 | * An opaquely defined type 17 | */ 18 | export type Opaque = T & { __TYPE__: K }; 19 | 20 | /** 21 | * A dynaglue context. Use [[createContext]] to instantiate. 22 | * 23 | * The internal layout of this type may change over time. 24 | */ 25 | export type Context = Opaque<'DynaglueContext', DynaglueContext>; 26 | 27 | /** 28 | * Create a context object, with layouts and access patterns for 29 | * storing and retrieving data. 
30 | * 31 | * @param dynamodb dynamodb instance, initialised with correct access key and region 32 | * @param collections a list of collection definitions to use with this context 33 | * @returns a context object 34 | * @throws {ConfigurationException} when there is a configuration issue in the given collections 35 | */ 36 | export function createContext( 37 | dynamodb: DynamoDBClient, 38 | collections: Collection[] 39 | ): Context { 40 | const definitions = new Map(); 41 | const rootDefinitions = new Map(); 42 | const childDefinitions = new Map(); 43 | 44 | for (const collection of collections) { 45 | const { name, layout } = collection; 46 | if (definitions.has(name)) { 47 | throw new ConfigurationException( 48 | `Duplicate collection definition: '${name}'` 49 | ); 50 | } 51 | 52 | if (layout.findKeys) { 53 | validateFindKeys(layout.findKeys); 54 | } 55 | 56 | let wrapperExtractKeys: ExtractKey[] = []; 57 | if (collection.accessPatterns) { 58 | wrapperExtractKeys = buildAndValidateAccessPatterns(collection); 59 | } 60 | if (collection.ttlKeyPath) { 61 | const { ttlKeyPath } = collection; 62 | if (!layout.ttlAttribute) { 63 | throw new ConfigurationException( 64 | `Collection '${name}' defines ttlKeyPath=${describeKeyPath( 65 | ttlKeyPath 66 | )} but layout has no ttlAttribute specified` 67 | ); 68 | } 69 | const ttlExtractKey: ExtractKey = { 70 | type: 'ttl', 71 | key: layout.ttlAttribute, 72 | valuePaths: [ttlKeyPath], 73 | options: {}, 74 | }; 75 | wrapperExtractKeys = [...wrapperExtractKeys, ttlExtractKey]; 76 | } 77 | definitions.set(collection.name, { 78 | ...collection, 79 | wrapperExtractKeys, 80 | }); 81 | 82 | if (collection.type === 'child') { 83 | childDefinitions.set(collection.name, { 84 | ...collection, 85 | wrapperExtractKeys, 86 | }); 87 | } else { 88 | rootDefinitions.set(collection.name, { 89 | ...collection, 90 | wrapperExtractKeys, 91 | }); 92 | } 93 | } 94 | 95 | for (const childDefinition of childDefinitions.values()) { 96 | const 
parentDefinition = rootDefinitions.get( 97 | childDefinition.parentCollectionName 98 | ); 99 | if (!parentDefinition) { 100 | throw new ConfigurationException( 101 | `Child collection ${childDefinition.name} refers to non-existent parent definition ${childDefinition.parentCollectionName}` 102 | ); 103 | } 104 | if (!isEqual(parentDefinition.layout, childDefinition.layout)) { 105 | throw new ConfigurationException( 106 | `Child collection ${childDefinition.name} must have same layout as parent definition ${parentDefinition.name}` 107 | ); 108 | } 109 | } 110 | 111 | return { 112 | ddb: dynamodb, 113 | definitions, 114 | rootDefinitions, 115 | childDefinitions, 116 | __TYPE__: 'DynaglueContext', 117 | }; 118 | } 119 | -------------------------------------------------------------------------------- /src/context/validators.test.ts: -------------------------------------------------------------------------------- 1 | import { validateFindKeys } from './validators'; 2 | import { ConfigurationException } from '../base/exceptions'; 3 | 4 | test('validateFindKeys throws on already used indexes', () => { 5 | const findKeys = [ 6 | { indexName: 'testi1', partitionKey: 'key1', sortKey: 'sort1' }, 7 | { indexName: 'testi1', partitionKey: 'key1', sortKey: 'sort1' }, 8 | ]; 9 | 10 | expect(() => validateFindKeys(findKeys)).toThrow(ConfigurationException); 11 | }); 12 | 13 | test('validateFindKeys passes on valid index configuration', () => { 14 | const findKeys = [ 15 | { indexName: 'testi1', partitionKey: 'key1', sortKey: 'sort1' }, 16 | { indexName: 'testi2', partitionKey: 'key2', sortKey: 'sort2' }, 17 | ]; 18 | 19 | expect(() => validateFindKeys(findKeys)).not.toThrow(ConfigurationException); 20 | }); 21 | -------------------------------------------------------------------------------- /src/context/validators.ts: -------------------------------------------------------------------------------- 1 | import { SecondaryIndexLayout } from '../base/layout'; 2 | import { 
ConfigurationException } from '../base/exceptions'; 3 | 4 | /** 5 | * @internal 6 | * 7 | * Validate that the find keys specified for an index layout are valid. 8 | */ 9 | export function validateFindKeys(findKeys: SecondaryIndexLayout[]): void { 10 | const alreadyDefinedIndexes: string[] = []; 11 | findKeys.forEach((findKey, index) => { 12 | if (alreadyDefinedIndexes.includes(findKey.indexName)) { 13 | throw new ConfigurationException( 14 | `find key at index ${index} has duplicate index reference ${findKey.indexName}` 15 | ); 16 | } 17 | alreadyDefinedIndexes.push(findKey.indexName); 18 | }); 19 | } 20 | -------------------------------------------------------------------------------- /src/debug/debugDynamo.ts: -------------------------------------------------------------------------------- 1 | import debug from 'debug'; 2 | 3 | /** @internal */ 4 | const logger = debug('dynaglue:dynamodb'); 5 | 6 | /** 7 | * @internal 8 | * 9 | * Helper for logging dynamo requests 10 | */ 11 | function debugDynamo(operation: string, request: unknown): void { 12 | logger('operation=%s request=%O', operation, request); 13 | } 14 | 15 | export default debugDynamo; 16 | -------------------------------------------------------------------------------- /src/debug/index.ts: -------------------------------------------------------------------------------- 1 | export * from './debugDynamo'; 2 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { createContext } from './context'; 2 | import type { Context } from './context'; 3 | export { 4 | findById, 5 | insert, 6 | find, 7 | deleteById, 8 | replace, 9 | findChildren, 10 | findChildById, 11 | deleteChildById, 12 | updateById, 13 | updateChildById, 14 | findByIdWithChildren, 15 | batchFindByIds, 16 | batchReplaceDelete, 17 | transactionWrite, 18 | } from './operations'; 19 | export type { Context }; 20 | 
export { createContext }; 21 | export type { 22 | PrimaryIndexLayout, 23 | SecondaryIndexLayout, 24 | CollectionLayout, 25 | } from './base/layout'; 26 | export type { 27 | AccessPattern, 28 | KeyPath, 29 | NormaliserFunction, 30 | AccessPatternOptions, 31 | } from './base/access_pattern'; 32 | export type { 33 | Collection, 34 | RootCollection, 35 | ChildCollection, 36 | } from './base/collection'; 37 | export { 38 | CollectionNotFoundException, 39 | ConfigurationException, 40 | ConflictException, 41 | IndexNotFoundException, 42 | InvalidBatchReplaceDeleteDescriptorException, 43 | InvalidCompositeConditionException, 44 | InvalidIdException, 45 | InvalidIndexedFieldValueException, 46 | InvalidParentIdException, 47 | InvalidQueryException, 48 | InvalidUpdateValueException, 49 | InvalidUpdatesException, 50 | } from './base/exceptions'; 51 | export type { 52 | WrappedDocument, 53 | DocumentWithId, 54 | DynamoDBSet, 55 | Key, 56 | } from './base/common'; 57 | export type { 58 | SetValuesDocument, 59 | Updates, 60 | SetChange, 61 | RemoveChange, 62 | AppendDeleteSetChange, 63 | AddValueChange, 64 | OperationUpdates, 65 | ChangesUpdates, 66 | } from './operations/update_by_id'; 67 | export type { 68 | FindQuery, 69 | FindOptions, 70 | FindResults, 71 | QueryOperator, 72 | } from './operations/find'; 73 | export type { FindByIdWithChildrenResult } from './operations/find_by_id_with_children'; 74 | export type { 75 | FindChildrenOptions, 76 | FindChildrenResults, 77 | KeyRangeExpression, 78 | } from './operations/find_children'; 79 | export type { 80 | AndCondition, 81 | OrCondition, 82 | NotCondition, 83 | KeyPathsAndClause, 84 | CompositeCondition, 85 | ComparisonCondition, 86 | EqCondition, 87 | NotEqCondition, 88 | GtCondition, 89 | LtCondition, 90 | GtEqCondition, 91 | LtEqCondition, 92 | BetweenCondition, 93 | InCondition, 94 | ExistsCondition, 95 | TypeCondition, 96 | BeginsWithCondition, 97 | ContainsCondition, 98 | ConditionValue, 99 | DynamoDBType, 100 | } from 
'./base/conditions'; 101 | export type { 102 | BatchFindByIdDescriptor, 103 | BatchFindByIdsResponse, 104 | } from './operations/batch_find_by_ids'; 105 | export type { 106 | BatchReplaceDescriptor, 107 | BatchDeleteDescriptor, 108 | BatchReplaceDeleteDescriptor, 109 | BatchReplaceDeleteResponse, 110 | } from './operations/batch_replace_delete'; 111 | export type { 112 | TransactionDeleteRequest, 113 | TransactionDeleteChildRequest, 114 | TransactionReplaceRequest, 115 | TransactionWriteRequest, 116 | } from './operations/transact_write'; 117 | export type { ParseElement } from './base/conditions_types'; 118 | -------------------------------------------------------------------------------- /src/operations/batch_find_by_ids.ts: -------------------------------------------------------------------------------- 1 | import { Context } from '../context'; 2 | import { convertToAttr, unmarshall } from '@aws-sdk/util-dynamodb'; 3 | import { 4 | InvalidFindDescriptorException, 5 | InternalProcessingException, 6 | } from '../base/exceptions'; 7 | import { 8 | getChildCollection, 9 | getRootCollection, 10 | assemblePrimaryKeyValue, 11 | unwrap, 12 | getCollection, 13 | } from '../base/util'; 14 | import { DocumentWithId, Key, WrappedDocument } from '../base/common'; 15 | import { CollectionLayout } from '../base/layout'; 16 | import debugDynamo from '../debug/debugDynamo'; 17 | import { parseKey } from './batch_utils'; 18 | import { 19 | BatchGetItemCommand, 20 | KeysAndAttributes, 21 | } from '@aws-sdk/client-dynamodb'; 22 | 23 | /** 24 | * The collection and ID of a root or child 25 | * item to retrieve with #transactFindByIds 26 | */ 27 | export type BatchFindByIdDescriptor = { 28 | /** The collection containing the item */ 29 | collection: string; 30 | /* The ID of the item */ 31 | id: string; 32 | /* The parent ID of the item (if a child item) */ 33 | parentId?: string; 34 | }; 35 | 36 | /** @internal */ 37 | export type TableKeyTuple = [string, Key]; 38 | 39 | /** 40 | * 
The response to a [[batchFindByIds]] request. The 41 | * retrieved documents are stored in a map, organised by 42 | * collection name. 43 | * 44 | * Any unprocessed request keys are included in the 45 | * unprocessedDescriptors list. You will need to submit 46 | * another request to obtain these. 47 | */ 48 | export type BatchFindByIdsResponse = { 49 | documentsByCollection: { 50 | [collection: string]: DocumentWithId[]; 51 | }; 52 | 53 | /** 54 | * Unprocessed keys - you will need to submit 55 | * another request for these subsequently 56 | */ 57 | unprocessedDescriptors: BatchFindByIdDescriptor[]; 58 | }; 59 | 60 | /** 61 | * Find multiple items by their primary key 62 | * in bulk. May specify items over multiple 63 | * tables and collections. 64 | * 65 | * @category Batch Operations 66 | */ 67 | export const batchFindByIds = async ( 68 | ctx: Context, 69 | items: BatchFindByIdDescriptor[], 70 | options?: { 71 | consistentReadTableNames?: string[]; 72 | } 73 | ): Promise => { 74 | if (items.length === 0) { 75 | throw new InvalidFindDescriptorException( 76 | 'At least one find descriptor must be specified' 77 | ); 78 | } else if (items.length > 100) { 79 | throw new InvalidFindDescriptorException( 80 | 'No more than 100 find descriptors can be specified to batchFindByIds' 81 | ); 82 | } 83 | 84 | // Map tables to their 'predicted' layouts - this is needed 85 | // because of the abject uselessness of the UnprocessedKeys structure, which 86 | // returns unprocessed items in random order arranged only 87 | // by table name. 88 | // 89 | // Because we could be accessing multiple tables, we need 90 | // the layout to discern the key names when parsing the 91 | // UnprocessedKeys map. 92 | const tableLayoutMapping = new Map(); 93 | 94 | const tableRequestItemTuples: TableKeyTuple[] = items.map( 95 | ({ collection, id, parentId }) => { 96 | const collectionDefinition = parentId 97 | ? 
getChildCollection(ctx, collection) 98 | : getRootCollection(ctx, collection); 99 | tableLayoutMapping.set( 100 | collectionDefinition.layout.tableName, 101 | collectionDefinition.layout 102 | ); 103 | const { 104 | layout: { tableName, primaryKey, indexKeySeparator }, 105 | } = collectionDefinition; 106 | return [ 107 | tableName, 108 | { 109 | [primaryKey.partitionKey]: convertToAttr( 110 | assemblePrimaryKeyValue( 111 | collectionDefinition.type === 'child' 112 | ? collectionDefinition.parentCollectionName 113 | : collectionDefinition.name, 114 | parentId ? parentId : id, 115 | indexKeySeparator 116 | ), 117 | { convertEmptyValues: false } 118 | ), 119 | [primaryKey.sortKey]: convertToAttr( 120 | assemblePrimaryKeyValue(collection, id, indexKeySeparator), 121 | { convertEmptyValues: false } 122 | ), 123 | }, 124 | ]; 125 | } 126 | ); 127 | 128 | const requestItems = tableRequestItemTuples.reduce((req, tuple) => { 129 | const keyAndsAttrs = req[tuple[0]] ?? { 130 | ConsistentRead: (options?.consistentReadTableNames ?? 
[]).includes( 131 | tuple[0] 132 | ), 133 | Keys: [], 134 | }; 135 | req[tuple[0]] = keyAndsAttrs; 136 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 137 | keyAndsAttrs.Keys!.push(tuple[1]); 138 | return req; 139 | }, {} as { [collection: string]: KeysAndAttributes }); 140 | 141 | const request = { RequestItems: requestItems }; 142 | debugDynamo('BatchGetItem', request); 143 | const command = new BatchGetItemCommand(request); 144 | const { Responses = {}, UnprocessedKeys = {} } = await ctx.ddb.send(command); 145 | 146 | const response: BatchFindByIdsResponse = { 147 | documentsByCollection: {}, 148 | unprocessedDescriptors: [], 149 | }; 150 | for (const items of Object.values(Responses)) { 151 | for (const item of items) { 152 | const unmarshalled = unmarshall(item) as WrappedDocument; 153 | const collection = getCollection(ctx, unmarshalled.type); 154 | const document = unwrap(unmarshalled); 155 | const collectionMap = 156 | response.documentsByCollection[collection.name] ?? []; 157 | response.documentsByCollection[collection.name] = collectionMap; 158 | collectionMap.push(document); 159 | } 160 | } 161 | 162 | for (const [tableName, unprocessed] of Object.entries(UnprocessedKeys)) { 163 | const tableMapping = tableLayoutMapping.get(tableName); 164 | if (!tableMapping) { 165 | throw new InternalProcessingException( 166 | `Could not find table mapping for ${tableName} while parsing UnprocessedKeys` 167 | ); 168 | } 169 | for (const key of unprocessed.Keys ?? 
[]) { 170 | const descriptor = parseKey(tableMapping, key); 171 | response.unprocessedDescriptors.push(descriptor); 172 | } 173 | } 174 | return response; 175 | }; 176 | -------------------------------------------------------------------------------- /src/operations/batch_replace_delete.ts: -------------------------------------------------------------------------------- 1 | import { 2 | BatchWriteItemInput, 3 | WriteRequest, 4 | BatchWriteItemCommand, 5 | } from '@aws-sdk/client-dynamodb'; 6 | import { marshall, unmarshall } from '@aws-sdk/util-dynamodb'; 7 | import { DocumentWithId, WrappedDocument } from '../base/common'; 8 | import { 9 | getCollection, 10 | toWrapped, 11 | unwrap, 12 | assemblePrimaryKeyValue, 13 | } from '../base/util'; 14 | import { Context } from '../context'; 15 | import debugDynamo from '../debug/debugDynamo'; 16 | import { parseKey } from './batch_utils'; 17 | import { CollectionLayout } from '../base/layout'; 18 | import { 19 | InternalProcessingException, 20 | InvalidBatchReplaceDeleteDescriptorException, 21 | } from '../base/exceptions'; 22 | 23 | /** 24 | * A replace (PutItem) request to perform with 25 | * [[batchReplaceDelete]]. 26 | * 27 | * Performs the equivalent operation as [[replace]], 28 | * which is a full insert or update on the primary 29 | * key of the document. 30 | */ 31 | export type BatchReplaceDescriptor = { 32 | /** The root or child collection */ 33 | collection: string; 34 | /** The operation to perform (always `'replace'`) */ 35 | op: 'replace'; 36 | /** The document of the item to be replaced */ 37 | replaceItem: DocumentWithId; 38 | }; 39 | 40 | /** 41 | * A replace (DeleteItem) request to perform with 42 | * [[batchReplaceDelete]]. 
43 | * 44 | * Performs the equivalent operation as [[deleteById]]/[[deleteChildById][, 45 | * which is a full delete on the primary key of a document 46 | */ 47 | export type BatchDeleteDescriptor = { 48 | /** The root or child collection */ 49 | collection: string; 50 | /** The operation to perform */ 51 | op: 'delete'; 52 | /** The item identifier */ 53 | id: string; 54 | /** 55 | * For child items, this is the parent object identifier 56 | * (mandatory for root 57 | * items, left blank for child items). 58 | */ 59 | parentId?: string; 60 | }; 61 | 62 | /** 63 | * A replace (PutItem) or delete (DeleteItem) request 64 | * to perform with [[batchReplaceDelete]] 65 | */ 66 | export type BatchReplaceDeleteDescriptor = 67 | | BatchReplaceDescriptor 68 | | BatchDeleteDescriptor; 69 | 70 | /** @internal */ 71 | type TableRequestItemTuple = [string, WriteRequest]; 72 | 73 | /** 74 | * Response from {@link batchReplaceDelete}. Contains 75 | * unprocessed items. 76 | */ 77 | export type BatchReplaceDeleteResponse = { 78 | /** 79 | * The items that were not processed in this call. You will 80 | * need to resubmit them to the API in a subsequent call. 81 | */ 82 | unprocessedDescriptors: BatchReplaceDeleteDescriptor[]; 83 | }; 84 | 85 | /** 86 | * Replace multiple items in bulk (effectively a bulk 87 | * `replaceItem()` call). 88 | * 89 | * You can update multiple items across different 90 | * tables and collections in the same call. 91 | * 92 | * @category Batch Operations 93 | */ 94 | export const batchReplaceDelete = async ( 95 | ctx: Context, 96 | descriptors: BatchReplaceDeleteDescriptor[] 97 | ): Promise => { 98 | if (descriptors.length === 0) { 99 | throw new InvalidBatchReplaceDeleteDescriptorException( 100 | 'You must specify at least one replace or delete descriptor' 101 | ); 102 | } 103 | // Because we could be accessing multiple tables, we need 104 | // the layout to discern the key names when parsing the 105 | // UnprocessedKeys map. 
106 | const tableLayoutMapping = new Map(); 107 | 108 | const tableRequestItemTuples: TableRequestItemTuple[] = descriptors.map( 109 | (descriptor) => { 110 | const collection = getCollection(ctx, descriptor.collection); 111 | tableLayoutMapping.set(collection.layout.tableName, collection.layout); 112 | let request: WriteRequest; 113 | if (descriptor.op === 'replace') { 114 | // replace item 115 | request = { 116 | PutRequest: { 117 | Item: marshall(toWrapped(collection, descriptor.replaceItem), { 118 | convertEmptyValues: false, 119 | }), 120 | }, 121 | }; 122 | } else { 123 | if (collection.type === 'child' && !descriptor.parentId) { 124 | throw new InvalidBatchReplaceDeleteDescriptorException( 125 | 'BatchDeleteDescriptor must specify parentId for child collections', 126 | { collectionName: descriptor.collection, id: descriptor.id } 127 | ); 128 | } else if (collection.type !== 'child' && descriptor.parentId) { 129 | throw new InvalidBatchReplaceDeleteDescriptorException( 130 | 'BatchDeleteDescriptor must not specify parentId for root collections', 131 | { collectionname: descriptor.collection, id: descriptor.id } 132 | ); 133 | } 134 | const partitionKeyValue = assemblePrimaryKeyValue( 135 | collection.type === 'child' 136 | ? collection.parentCollectionName 137 | : collection.name, 138 | descriptor.parentId ?? 
descriptor.id, 139 | collection.layout.indexKeySeparator 140 | ); 141 | const sortKeyValue = assemblePrimaryKeyValue( 142 | collection.name, 143 | descriptor.id, 144 | collection.layout.indexKeySeparator 145 | ); 146 | request = { 147 | DeleteRequest: { 148 | Key: marshall( 149 | { 150 | [collection.layout.primaryKey.partitionKey]: partitionKeyValue, 151 | [collection.layout.primaryKey.sortKey]: sortKeyValue, 152 | }, 153 | { convertEmptyValues: false, removeUndefinedValues: true } 154 | ), 155 | }, 156 | }; 157 | } 158 | return [collection.layout.tableName, request]; 159 | } 160 | ); 161 | 162 | const requestItems = tableRequestItemTuples.reduce( 163 | (riMap, [table, request]) => { 164 | const items = riMap[table] ?? []; 165 | riMap[table] = items; 166 | items.push(request); 167 | return riMap; 168 | }, 169 | {} as { [key: string]: WriteRequest[] }, 170 | ); 171 | 172 | const request: BatchWriteItemInput = { 173 | RequestItems: requestItems, 174 | }; 175 | 176 | debugDynamo('BatchWriteItem', request); 177 | const command = new BatchWriteItemCommand(request); 178 | const { UnprocessedItems = {} } = await ctx.ddb.send(command); 179 | 180 | const unprocessedDescriptors: BatchReplaceDeleteDescriptor[] = []; 181 | for (const [tableName, unprocessed] of Object.entries(UnprocessedItems)) { 182 | const tableMapping = tableLayoutMapping.get(tableName); 183 | if (!tableMapping) { 184 | throw new InternalProcessingException( 185 | `Could not find table mapping for ${tableName} while parsing UnprocessedKeys` 186 | ); 187 | } 188 | for (const item of unprocessed) { 189 | const { PutRequest, DeleteRequest } = item; 190 | if (PutRequest?.Item) { 191 | const key = parseKey(tableMapping, PutRequest.Item); 192 | const wrapped = unmarshall( 193 | PutRequest.Item 194 | ) as WrappedDocument; 195 | const document = unwrap(wrapped); 196 | unprocessedDescriptors.push({ 197 | op: 'replace', 198 | replaceItem: document, 199 | collection: key.collection, 200 | }); 201 | } else if 
(DeleteRequest?.Key) { 202 | const key = parseKey(tableMapping, DeleteRequest.Key); 203 | unprocessedDescriptors.push({ 204 | op: 'delete', 205 | ...key, 206 | }); 207 | } else { 208 | throw new Error('Unknown unprocessed item: ' + JSON.stringify(item)); 209 | } 210 | } 211 | } 212 | return { 213 | unprocessedDescriptors, 214 | }; 215 | }; 216 | -------------------------------------------------------------------------------- /src/operations/batch_utils.ts: -------------------------------------------------------------------------------- 1 | import { convertToNative } from '@aws-sdk/util-dynamodb'; 2 | import { Key } from '../base/common'; 3 | import { InternalProcessingException } from '../base/exceptions'; 4 | import { CollectionLayout } from '../base/layout'; 5 | import { SEPARATOR } from '../base/util'; 6 | 7 | /** 8 | * @internal 9 | * 10 | * The ID of a root or child item. 11 | * 12 | */ 13 | export type ItemIdDescriptor = { 14 | /** The collection containing the item */ 15 | collection: string; 16 | /* The ID of the item */ 17 | id: string; 18 | /* The parent ID of the item (if a child item) */ 19 | parentId?: string; 20 | }; 21 | 22 | /** 23 | * @internal 24 | * 25 | * Parse a dynaglue key. For use with BatchGetItem/BatchWriteItem interactions, 26 | * which return unprocessed items in a random order, requiring us to disassemble 27 | * our request in order to work out what to resubmit. 
28 | * 29 | */ 30 | export const parseKey = ( 31 | layout: CollectionLayout, 32 | key: Key 33 | ): ItemIdDescriptor => { 34 | const partitionKey = convertToNative(key[layout.primaryKey.partitionKey]); 35 | const sortKey = convertToNative(key[layout.primaryKey.sortKey]); 36 | // The following checks are for sanity and should not occur in any real application that 37 | // has setup the layout correctly 38 | if (!partitionKey || !sortKey) 39 | throw new InternalProcessingException( 40 | `Selected layout could not find key names ${JSON.stringify( 41 | key 42 | )} for table ${layout.tableName}` 43 | ); 44 | if (typeof partitionKey !== 'string') 45 | throw new InternalProcessingException( 46 | `Partition key ${layout.primaryKey.partitionKey} for table ${layout.tableName} is not a string` 47 | ); 48 | if (typeof sortKey !== 'string') 49 | throw new InternalProcessingException( 50 | `Sort key ${layout.primaryKey.sortKey} for table ${layout.tableName} is not a string` 51 | ); 52 | const [, parentId] = partitionKey.split( 53 | layout.indexKeySeparator ?? SEPARATOR, 54 | 2 55 | ); 56 | const [childCollection, id] = sortKey.split( 57 | layout.indexKeySeparator ?? SEPARATOR, 58 | 2 59 | ); 60 | return { 61 | collection: childCollection, 62 | parentId: parentId !== id ? 
parentId : undefined, 63 | id: id, 64 | }; 65 | }; 66 | -------------------------------------------------------------------------------- /src/operations/delete_by_id.test.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { marshall } from '@aws-sdk/util-dynamodb'; 3 | import { CollectionNotFoundException } from '../base/exceptions'; 4 | import { deleteById } from './delete_by_id'; 5 | import { createContext } from '../context'; 6 | import { createDynamoMock } from '../../testutil/dynamo_mock'; 7 | 8 | describe('deleteById', () => { 9 | const layout = { 10 | tableName: 'testtable', 11 | primaryKey: { partitionKey: 'id', sortKey: 'collection' }, 12 | }; 13 | 14 | const collection = { 15 | name: 'test-collection', 16 | layout, 17 | }; 18 | 19 | test('throws when the collection does not exist', () => { 20 | const context = createContext({} as DynamoDBClient, [collection]); 21 | return expect( 22 | deleteById(context, 'not-a-collection', 'idvalue') 23 | ).rejects.toThrowError(CollectionNotFoundException); 24 | }); 25 | 26 | test('returns undefined when there is no old value', async () => { 27 | const mock = createDynamoMock('deleteItem', {}); 28 | const context = createContext(mock as unknown as DynamoDBClient, [collection]); 29 | const result = await deleteById(context, 'test-collection', 'idvalue'); 30 | 31 | expect(mock.send.mock.calls[0][0].input).toEqual({ 32 | TableName: 'testtable', 33 | Key: { 34 | id: { S: 'test-collection|-|idvalue' }, 35 | collection: { S: 'test-collection|-|idvalue' }, 36 | }, 37 | ReturnValues: 'ALL_OLD', 38 | }); 39 | expect(result).toBeUndefined(); 40 | }); 41 | 42 | test('returns old value when it is returned', async () => { 43 | const value = { 44 | _id: 'idvalue', 45 | name: 'Test Name', 46 | }; 47 | 48 | const mock = createDynamoMock('deleteItem', { 49 | Attributes: marshall({ value }, { convertEmptyValues: false, 
removeUndefinedValues: true }),
    });
    const context = createContext(mock as unknown as DynamoDBClient, [collection]);
    const result = await deleteById(context, 'test-collection', 'idvalue');

    expect(mock.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'test-collection|-|idvalue' },
        collection: { S: 'test-collection|-|idvalue' },
      },
      ReturnValues: 'ALL_OLD',
    });
    expect(result).toEqual(value);
  });

  test('works with custom separators', async () => {
    const mock = createDynamoMock('deleteItem', {});
    const customCollection = {
      ...collection,
      layout: { ...layout, indexKeySeparator: '#' },
    };
    const context = createContext(mock as unknown as DynamoDBClient, [
      customCollection,
    ]);
    await deleteById(context, 'test-collection', 'idvalue');

    expect(mock.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'test-collection#idvalue' },
        collection: { S: 'test-collection#idvalue' },
      },
      ReturnValues: 'ALL_OLD',
    });
  });
});
--------------------------------------------------------------------------------
/src/operations/delete_by_id.ts:
--------------------------------------------------------------------------------
import { DeleteItemCommand, DeleteItemInput } from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
import { Context } from '../context';
import {
  unwrap,
  assemblePrimaryKeyValue,
  getRootCollection,
} from '../base/util';
import { DocumentWithId, WrappedDocument } from '../base/common';
import debugDynamo from '../debug/debugDynamo';
import { CompositeCondition } from '../base/conditions';
import { createNameMapper, createValueMapper } from '../base/mappers';
import { parseCompositeCondition } from '../base/conditions_parser';

/**
 * Delete a root object using its `_id` field
 *
 * @category Mutation
 *
 * @param context the context object
 * @param collectionName the name of the collection
 * @param id the object to remove
 * @param options options to apply
 * @param options.condition an optional conditional expression that must be satisfied for the delete to proceed
 * @returns the deleted object (as stored in the database), or
 * `undefined` if not found
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export async function deleteById(
  context: Context,
  collectionName: string,
  id: string,
  options: { condition?: CompositeCondition } = {}
): Promise<DocumentWithId | undefined> {
  const request: DeleteItemInput = createDeleteByIdRequest(
    context,
    collectionName,
    id,
    options
  );

  debugDynamo('DeleteItem', request);

  const command = new DeleteItemCommand(request);
  const result = await context.ddb.send(command);

  if (result.Attributes) {
    const wrapped = unmarshall(result.Attributes);
    return unwrap(wrapped as WrappedDocument<DocumentWithId>);
  }

  return undefined;
}

/**
 * Create a delete request using its `_id` field
 *
 * @category Mutation
 *
 * @param context the context object
 * @param collectionName the name of the collection
 * @param id the object to remove
 * @param options options to apply
 * @param options.condition an optional conditional expression that must be satisfied for the delete to proceed
 * @returns the delete request as @see {DeleteItemInput}
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export const createDeleteByIdRequest = (
  context: Context,
  collectionName: string,
  id: string,
  options: { condition?: CompositeCondition } = {}
): DeleteItemInput => {
  const collection = getRootCollection(context, collectionName);
  const nameMapper = createNameMapper();
  const valueMapper = createValueMapper();
  let conditionExpression;
  if (options.condition) {
    conditionExpression = parseCompositeCondition(options.condition, {
      nameMapper,
      valueMapper,
      parsePath: [],
    });
  }
  const request: DeleteItemInput = {
    TableName: collection.layout.tableName,
    Key: marshall(
      {
        [collection.layout.primaryKey.partitionKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
        [collection.layout.primaryKey.sortKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
      },
      { convertEmptyValues: false, removeUndefinedValues: true }
    ),
    ReturnValues: 'ALL_OLD',
    ConditionExpression: conditionExpression,
    ExpressionAttributeNames: nameMapper.get(),
    ExpressionAttributeValues: valueMapper.get(),
  };

  return request;
};
--------------------------------------------------------------------------------
/src/operations/delete_child_by_id.test.ts:
--------------------------------------------------------------------------------
import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { CollectionNotFoundException } from '../base/exceptions';
import { deleteChildById } from './delete_child_by_id';
import { createContext } from '../context';
import { createDynamoMock } from '../../testutil/dynamo_mock';
import { Collection } from '../base/collection';
import { marshall } from '@aws-sdk/util-dynamodb';

describe('deleteChildById', () => {
  const layout = {
    tableName: 'testtable',
    primaryKey: { partitionKey: 'id', sortKey: 'collection' },
  };

  const rootCollection: Collection = {
    name: 'root-collection',
    layout,
  };

  const childCollection: Collection
= {
    name: 'test-collection',
    type: 'child',
    layout,
    foreignKeyPath: ['rootId'],
    parentCollectionName: 'root-collection',
  };

  test('throws when the collection does not exist', () => {
    const context = createContext({} as DynamoDBClient, [
      rootCollection,
      childCollection,
    ]);
    return expect(
      deleteChildById(context, 'not-a-collection', 'idvalue', 'rootid')
    ).rejects.toThrowError(CollectionNotFoundException);
  });

  test('returns undefined when there is no old value', async () => {
    const mock = createDynamoMock('deleteItem', {});
    const context = createContext(mock as unknown as DynamoDBClient, [
      rootCollection,
      childCollection,
    ]);
    const result = await deleteChildById(
      context,
      'test-collection',
      'idvalue',
      'rootid'
    );

    expect(mock.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'root-collection|-|rootid' },
        collection: { S: 'test-collection|-|idvalue' },
      },
      ReturnValues: 'ALL_OLD',
    });
    expect(result).toBeUndefined();
  });

  test('returns old value when it is returned', async () => {
    const value = {
      _id: 'idvalue',
      name: 'Test Name',
    };

    const mock = createDynamoMock('deleteItem', {
      Attributes: marshall({ value }, { convertEmptyValues: false, removeUndefinedValues: true }),
    });
    const context = createContext(mock as unknown as DynamoDBClient, [
      rootCollection,
      childCollection,
    ]);
    const result = await deleteChildById(
      context,
      'test-collection',
      'idvalue',
      'rootid'
    );

    expect(mock.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'root-collection|-|rootid' },
        collection: { S: 'test-collection|-|idvalue' },
      },
      ReturnValues: 'ALL_OLD',
    });
    expect(result).toEqual(value);
  });

  test('works with custom separators', async () => {
    const mock = createDynamoMock('deleteItem', {});
    const customLayout = { ...layout, indexKeySeparator: '#' };
    const customRootCollection = { ...rootCollection, layout: customLayout };
    const customChildCollection = { ...childCollection, layout: customLayout };
    const context = createContext(mock as unknown as DynamoDBClient, [
      customRootCollection,
      customChildCollection,
    ]);
    await deleteChildById(context, 'test-collection', 'idvalue', 'rootid');

    expect(mock.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'root-collection#rootid' },
        collection: { S: 'test-collection#idvalue' },
      },
      ReturnValues: 'ALL_OLD',
    });
  });
});
--------------------------------------------------------------------------------
/src/operations/delete_child_by_id.ts:
--------------------------------------------------------------------------------
import { DeleteItemInput, DeleteItemCommand } from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
import { Context } from '../context';
import {
  unwrap,
  assemblePrimaryKeyValue,
  getChildCollection,
} from '../base/util';
import { WrappedDocument, DocumentWithId } from '../base/common';
import debugDynamo from '../debug/debugDynamo';
import { createNameMapper, createValueMapper } from '../base/mappers';
import { CompositeCondition } from '../base/conditions';
import { parseCompositeCondition } from '../base/conditions_parser';

/**
 * Delete a child object using its `_id` field and its parent's `_id`.
 *
 * @category Mutation
 *
 * @param context the context object
 * @param collectionName the name of the collection
 * @param id the child object to remove
 * @param rootObjectId the parent object id
 * @param options options to change behaviour of DynamoDB
 * @param options.condition a condition expression blocking the delete operation
 * @returns the deleted object (as stored in the database), or
 * `undefined` if not found
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export async function deleteChildById(
  context: Context,
  collectionName: string,
  id: string,
  rootObjectId: string,
  options: { condition?: CompositeCondition } = {}
): Promise<DocumentWithId | undefined> {
  const request: DeleteItemInput = createDeleteChildByIdRequest(
    context,
    collectionName,
    id,
    rootObjectId,
    options
  );

  debugDynamo('DeleteItem', request);

  const command = new DeleteItemCommand(request);
  const result = await context.ddb.send(command);
  if (result.Attributes) {
    const wrapped = unmarshall(result.Attributes);
    return unwrap(wrapped as WrappedDocument<DocumentWithId>);
  }
  return undefined;
}

/**
 * Create a delete child request using its `_id` field and its parent's `_id`
 *
 * @category Mutation
 *
 * @param context the context object
 * @param collectionName the name of the collection
 * @param id the child object to remove
 * @param rootObjectId the parent object id
 * @param options options to apply
 * @param options.condition a condition expression blocking the delete operation
 * @returns the delete request as @see {DeleteItemInput}
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export const createDeleteChildByIdRequest = (
  context: Context,
  collectionName: string,
  id: string,
  rootObjectId: string,
  options: { condition?: CompositeCondition } = {}
): DeleteItemInput => {
  const collection = getChildCollection(context, collectionName);

  const nameMapper = createNameMapper();
  const valueMapper = createValueMapper();

  let conditionExpression;
  if (options.condition) {
    conditionExpression = parseCompositeCondition(options.condition, {
      nameMapper,
      valueMapper,
      parsePath: [],
    });
  }
  const request: DeleteItemInput = {
    TableName: collection.layout.tableName,
    Key: marshall(
      {
        [collection.layout.primaryKey.partitionKey]: assemblePrimaryKeyValue(
          collection.parentCollectionName,
          rootObjectId,
          collection.layout.indexKeySeparator
        ),
        [collection.layout.primaryKey.sortKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
      },
      { convertEmptyValues: false, removeUndefinedValues: true }
    ),
    ReturnValues: 'ALL_OLD',
    ConditionExpression: conditionExpression,
    ExpressionAttributeNames: nameMapper.get(),
    ExpressionAttributeValues: valueMapper.get(),
  };

  return request;
};
--------------------------------------------------------------------------------
/src/operations/find_by_id.test.ts:
--------------------------------------------------------------------------------
import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { marshall } from '@aws-sdk/util-dynamodb';
import { createDynamoMock } from '../../testutil/dynamo_mock';
import { createContext } from '../context';
import { findById } from './find_by_id';

describe('findById', () => {
  const layout = {
    tableName: 'testtable',
    primaryKey: { partitionKey: 'id', sortKey: 'collection' },
  };

  const collection = {
    name: 'test-collection',
    layout,
  };

  test('returns undefined when it
cannot find a value', async () => { 19 | const getItemReturnValue = {}; 20 | const ddb = createDynamoMock('getItem', getItemReturnValue); 21 | const context = createContext(ddb as unknown as DynamoDBClient, [collection]); 22 | expect( 23 | await findById(context, 'test-collection', 'test-id1') 24 | ).toBeUndefined(); 25 | 26 | expect(ddb.send.mock.calls[0][0].input).toEqual({ 27 | TableName: 'testtable', 28 | Key: { 29 | id: { S: 'test-collection|-|test-id1' }, 30 | collection: { S: 'test-collection|-|test-id1' }, 31 | }, 32 | }); 33 | }); 34 | 35 | test('returns the unwrapped value when it exists', async () => { 36 | const item = { 37 | value: { 38 | _id: 'test-id1', 39 | location: { 40 | lat: 123, 41 | lon: 456.78, 42 | }, 43 | }, 44 | }; 45 | const getItemReturnValue = { 46 | Item: marshall(item), 47 | }; 48 | const ddb = createDynamoMock('getItem', getItemReturnValue); 49 | const context = createContext(ddb as unknown as DynamoDBClient, [collection]); 50 | 51 | expect(await findById(context, 'test-collection', 'test-id1')).toEqual( 52 | item.value 53 | ); 54 | 55 | expect(ddb.send.mock.calls[0][0].input).toEqual({ 56 | TableName: 'testtable', 57 | Key: { 58 | id: { S: 'test-collection|-|test-id1' }, 59 | collection: { S: 'test-collection|-|test-id1' }, 60 | }, 61 | }); 62 | }); 63 | 64 | test('works with custom separators correctly', async () => { 65 | const getItemReturnValue = {}; 66 | const ddb = createDynamoMock('getItem', getItemReturnValue); 67 | const customCollection = { 68 | ...collection, 69 | layout: { ...layout, indexKeySeparator: '@' }, 70 | }; 71 | const context = createContext(ddb as unknown as DynamoDBClient, [ 72 | customCollection, 73 | ]); 74 | await findById(context, 'test-collection', 'test-id1'); 75 | 76 | expect(ddb.send.mock.calls[0][0].input).toEqual({ 77 | TableName: 'testtable', 78 | Key: { 79 | id: { S: 'test-collection@test-id1' }, 80 | collection: { S: 'test-collection@test-id1' }, 81 | }, 82 | }); 83 | }); 84 | }); 85 | 
--------------------------------------------------------------------------------
/src/operations/find_by_id.ts:
--------------------------------------------------------------------------------
import { GetItemCommand, GetItemInput } from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
import { Context } from '../context';
import {
  unwrap,
  assemblePrimaryKeyValue,
  getRootCollection,
} from '../base/util';
import { DocumentWithId, WrappedDocument } from '../base/common';
import debugDynamo from '../debug/debugDynamo';

/**
 * Retrieve a top-level document by its `_id` field.
 *
 * @category Query
 *
 * @param context the context object
 * @param collectionName name of the collection to search
 * @param id the `_id` value of the root document
 * @param options the set of options for the search
 * @param options.consistentRead search with strongly consistent reads
 * @returns the stored value, or `undefined` if not found
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export async function findById(
  context: Context,
  collectionName: string,
  id: string,
  options?: {
    consistentRead?: boolean;
  }
): Promise<DocumentWithId | undefined> {
  const collection = getRootCollection(context, collectionName);
  // Root documents use the same assembled `collection|id` value for both
  // the partition and sort key.
  const request: GetItemInput = {
    TableName: collection.layout.tableName,
    ConsistentRead: options?.consistentRead,
    Key: marshall(
      {
        [collection.layout.primaryKey.partitionKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
        [collection.layout.primaryKey.sortKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
      },
      { convertEmptyValues: false, removeUndefinedValues: true }
    ),
  };
  debugDynamo('GetItem', request);
  const command = new GetItemCommand(request);
  const result = await context.ddb.send(command);
  if (result.Item) {
    const wrapped = unmarshall(result.Item);
    return unwrap(wrapped as WrappedDocument<DocumentWithId>);
  }
  return undefined;
}
--------------------------------------------------------------------------------
/src/operations/find_by_id_with_children.test.ts:
--------------------------------------------------------------------------------
import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { marshall } from '@aws-sdk/util-dynamodb';
import { SEPARATOR, toWrapped } from '../base/util';
import { Collection } from '../base/collection';
import { CollectionLayout } from '../base/layout';
import { createContext } from '../context';
import { createDynamoMock } from '../../testutil/dynamo_mock';
import { findByIdWithChildren } from './find_by_id_with_children';
import { CollectionNotFoundException } from '../base/exceptions';
import { CollectionDefinition } from '../base/collection_definition';

describe('findByIdWithChildren', () => {
  const layout: CollectionLayout = {
    tableName: 'global',
    primaryKey: {
      partitionKey: 'pk',
      sortKey: 'sk',
    },
  };

  const rootCollection = {
    name: 'users',
    layout,
  };

  const addressesCollection = {
    name: 'addresses',
    parentCollectionName: 'users',
    layout,
    type: 'child',
    foreignKeyPath: ['userId'],
  };
  const profilesCollection = {
    name: 'profiles',
    parentCollectionName: 'users',
    layout,
    type: 'child',
    foreignKeyPath: ['userId'],
  };

  const teamsCollection = {
    name: 'teams',
    layout,
  };

  const allCollections: Collection[] = [
    rootCollection,
    addressesCollection,
    profilesCollection,
    teamsCollection,
  ];

  it('should fail if one of the specified child collections
does not have the root as its parent', async () => {
    // 'teams' is a root collection, so it cannot be queried as a child of 'users'
    const dynamoMock = createDynamoMock('query', {});
    const context = createContext(dynamoMock as unknown as DynamoDBClient, allCollections);
    await expect(
      findByIdWithChildren(context, 'users', 'testid', ['addresses', 'teams'])
    ).rejects.toThrowError(CollectionNotFoundException);
  });

  it('should limit itself to just the specified child collections', async () => {
    const dynamoMock = createDynamoMock('query', {});
    const context = createContext(dynamoMock as unknown as DynamoDBClient, allCollections);
    await findByIdWithChildren(
      context,
      'users',
      'testid',
      ['profiles'],
      undefined,
      { scanForward: false }
    );
    // Only 'profiles' (plus the root itself) should appear in the query values
    expect(dynamoMock.send).toHaveBeenCalledWith(
      expect.objectContaining({
        input: expect.objectContaining({
          ExpressionAttributeValues: {
            ':value0': { S: `users${SEPARATOR}testid` },
            ':value1': { S: `profiles${SEPARATOR}` },
            ':value2': { S: `users${SEPARATOR}\uFFFF` },
            ':value3': { S: `profiles` },
            ':value4': { S: 'users' },
          },
        }),
      })
    );
  });

  it('should search against all the known child collections if none are specified', async () => {
    const dynamoMock = createDynamoMock('query', {});
    const context = createContext(dynamoMock as unknown as DynamoDBClient, allCollections);
    await findByIdWithChildren(context, 'users', 'testid');
    // Both child collections of 'users' are included in the range + filter
    expect(dynamoMock.send).toHaveBeenCalledWith(
      expect.objectContaining({
        input: expect.objectContaining({
          KeyConditionExpression:
            'pk = :value0 AND sk BETWEEN :value1 AND :value2',
          ExpressionAttributeValues: expect.objectContaining({
            ':value0': { S: `users${SEPARATOR}testid` },
            ':value1': { S: `addresses${SEPARATOR}` },
            ':value2': { S: `users${SEPARATOR}\uFFFF` },
            ':value3': { S: 'addresses' },
            ':value4': { S: 'profiles' },
            ':value5': { S: 'users' },
          }),
          FilterExpression: '#attr0 IN (:value3,:value4,:value5)',
          ExpressionAttributeNames: expect.objectContaining({
            '#attr0': 'type',
          }),
        }),
      })
    );
  });

  it('should return an empty root object if not found in the results', async () => {
    const address1 = { _id: 'testaddress1', userId: 'userId1' };
    const dynamoMock = createDynamoMock('query', {
      Items: [
        marshall(
          toWrapped(
            {
              ...addressesCollection,
              wrapperExtractKeys: [],
            } as CollectionDefinition,
            address1
          )
        ),
      ],
    });
    const context = createContext(dynamoMock as unknown as DynamoDBClient, allCollections);
    const result = await findByIdWithChildren(context, 'users', 'testid');

    expect(result.root).toBeUndefined();
    expect(result.children.addresses).toEqual([address1]);
    expect(result.children.profiles).toEqual([]);
  });

  it('should fill out the root if found in the results', async () => {
    const address1 = { _id: 'testaddress1', userId: 'userId1' };
    const root1 = { _id: 'root1' };
    const rootDefinition = {
      ...rootCollection,
      wrapperExtractKeys: [],
    } as CollectionDefinition;
    const addressesDefinition = {
      ...addressesCollection,
      wrapperExtractKeys: [],
    } as CollectionDefinition;
    const dynamoMock = createDynamoMock('query', {
      Items: [
        marshall(toWrapped(addressesDefinition, address1)),
        marshall(toWrapped(rootDefinition, root1)),
      ],
    });
    const context = createContext(dynamoMock as unknown as DynamoDBClient, allCollections);
    const result = await findByIdWithChildren(context, 'users', 'testid');

    expect(result.root).toEqual(root1);
    expect(result.children.addresses).toEqual([address1]);
    expect(result.children.profiles).toEqual([]);
  });
});
--------------------------------------------------------------------------------
/src/operations/find_child_by_id.test.ts:
--------------------------------------------------------------------------------
import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { marshall } from '@aws-sdk/util-dynamodb';
import { createDynamoMock } from '../../testutil/dynamo_mock';
import { createContext } from '../context';
import { findChildById } from './find_child_by_id';
import { Collection } from '../base/collection';

describe('findChildById', () => {
  const layout = {
    tableName: 'testtable',
    primaryKey: { partitionKey: 'id', sortKey: 'collection' },
  };

  const rootCollection: Collection = {
    name: 'root-collection',
    type: 'root',
    layout,
  };
  const childCollection: Collection = {
    name: 'test-collection',
    layout,
    type: 'child',
    parentCollectionName: 'root-collection',
    foreignKeyPath: ['rootId'],
  };

  test('returns undefined when it cannot find a value', async () => {
    const getItemReturnValue = {};
    const ddb = createDynamoMock('getItem', getItemReturnValue);
    const context = createContext(ddb as unknown as DynamoDBClient, [
      rootCollection,
      childCollection,
    ]);
    expect(
      await findChildById(context, 'test-collection', 'test-id1', 'root-id-1')
    ).toBeUndefined();

    expect(ddb.send.mock.calls[0][0]).toEqual(
      expect.objectContaining({
        input: {
          TableName: 'testtable',
          Key: {
            id: { S: 'root-collection|-|root-id-1' },
            collection: { S: 'test-collection|-|test-id1' },
          },
        },
      })
    );
  });

  test('returns the unwrapped value when it exists', async () => {
    const item = {
      value: {
        _id: 'test-id1',
        location: {
          lat: 123,
          lon: 456.78,
        },
      },
    };
    const getItemReturnValue = {
      Item: marshall(item),
    };
    const ddb = createDynamoMock('getItem', getItemReturnValue);
    const context = createContext(ddb as unknown as DynamoDBClient, [
      rootCollection,
      childCollection,
    ]);

    expect(
      await findChildById(context, 'test-collection', 'test-id1', 'root-id-1')
    ).toEqual(item.value);

    expect(ddb.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'root-collection|-|root-id-1' },
        collection: { S: 'test-collection|-|test-id1' },
      },
    });
  });

  test('works with a custom layout index key separator correctly', async () => {
    const getItemReturnValue = {};
    const ddb = createDynamoMock('getItem', getItemReturnValue);
    const customLayout = { ...layout, indexKeySeparator: '#' };
    const customRootCollection = { ...rootCollection, layout: customLayout };
    const customChildCollection = { ...childCollection, layout: customLayout };
    const context = createContext(ddb as unknown as DynamoDBClient, [
      customRootCollection,
      customChildCollection,
    ]);
    expect(
      await findChildById(context, 'test-collection', 'test-id1', 'root-id-1')
    ).toBeUndefined();

    expect(ddb.send.mock.calls[0][0].input).toEqual({
      TableName: 'testtable',
      Key: {
        id: { S: 'root-collection#root-id-1' },
        collection: { S: 'test-collection#test-id1' },
      },
    });
  });
});
--------------------------------------------------------------------------------
/src/operations/find_child_by_id.ts:
--------------------------------------------------------------------------------
import { GetItemCommand, GetItemInput } from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
import { Context } from '../context';
import {
  unwrap,
  assemblePrimaryKeyValue,
  getChildCollection,
} from '../base/util';
import { DocumentWithId, WrappedDocument } from '../base/common';
import debugDynamo from '../debug/debugDynamo';

/**
 * Retrieve a child item by its `_id` field and its parent `_id`.
 *
 * Child objects can only be directly retrieved with both the parent
 * and child _id. This limitation allows them to be stored without
 * an extra index, and enables the `findChildren` method to retrieve
 * all the children of a root object in the same call.
 *
 * @category Query
 *
 * @param context the context object
 * @param collectionName name of the collection to search
 * @param id the `_id` value
 * @param rootObjectId the _id of the root object
 * @param options the set of options for the search
 * @param options.consistentRead search with strongly consistent reads
 * @returns the stored value, or `undefined` if not found
 * @throws {CollectionNotFoundException} when the collection is not found in the context
 */
export async function findChildById(
  context: Context,
  collectionName: string,
  id: string,
  rootObjectId: string,
  options?: {
    consistentRead?: boolean;
  }
): Promise<DocumentWithId | undefined> {
  const collection = getChildCollection(context, collectionName);
  // Child items are keyed by parent collection+id (partition) and
  // child collection+id (sort).
  const request: GetItemInput = {
    TableName: collection.layout.tableName,
    ConsistentRead: options?.consistentRead,
    Key: marshall(
      {
        [collection.layout.primaryKey.partitionKey]: assemblePrimaryKeyValue(
          collection.parentCollectionName,
          rootObjectId,
          collection.layout.indexKeySeparator
        ),
        [collection.layout.primaryKey.sortKey]: assemblePrimaryKeyValue(
          collectionName,
          id,
          collection.layout.indexKeySeparator
        ),
      },
      { convertEmptyValues: false, removeUndefinedValues: true }
    ),
  };
  debugDynamo('GetItem', request);
  const command = new GetItemCommand(request);
  const result = await context.ddb.send(command);
  if (result.Item) {
    const wrapped = unmarshall(result.Item);
    return unwrap(wrapped as WrappedDocument<DocumentWithId>);
  }
  return undefined;
}
-------------------------------------------------------------------------------- /src/operations/find_children.ts: -------------------------------------------------------------------------------- 1 | import { QueryCommand, QueryInput } from '@aws-sdk/client-dynamodb'; 2 | import { unmarshall } from '@aws-sdk/util-dynamodb'; 3 | import { 4 | getChildCollection, 5 | assemblePrimaryKeyValue, 6 | unwrap, 7 | } from '../base/util'; 8 | import { Context } from '../context'; 9 | import { DocumentWithId, Key, WrappedDocument } from '../base/common'; 10 | import debugDynamo from '../debug/debugDynamo'; 11 | import { CompositeCondition } from '../base/conditions'; 12 | import { createNameMapper, createValueMapper } from '../base/mappers'; 13 | import { InvalidRangeOperatorException } from '../base/exceptions'; 14 | import { decrementLast, incrementLast } from '../base/lexo'; 15 | import { parseCompositeCondition } from '../base/conditions_parser'; 16 | 17 | /** 18 | * The results of a [[findChildren]] operation. 19 | */ 20 | export type FindChildrenResults = { 21 | /** the items that were returned in this batch */ 22 | items: DocumentType[]; 23 | /** The pagination token. If this value is specified, it means 24 | * there is more results for the query. Provide it to another 25 | * call to `findChildren` to get the next set of results. 26 | */ 27 | nextToken?: Key; 28 | }; 29 | 30 | /** 31 | * The range expression to use on the sort key (`_id`) when 32 | * querying for child collection objects with 33 | * {@link findChildren}. 
34 | */ 35 | export type KeyRangeExpression = 36 | | { 37 | op: 'gte' | 'gt' | 'lte' | 'lt' | 'begins_with'; 38 | value: string; 39 | } 40 | | { 41 | op: 'between'; 42 | min: string; 43 | max: string; 44 | }; 45 | 46 | /** 47 | * The options to a [[findChildren]] operation 48 | */ 49 | export type FindChildrenOptions = { 50 | /* 51 | * The item limit to pass to DynamoDB 52 | */ 53 | limit?: number; 54 | /** 55 | * `true` (default) to scan the index forward, `false` to scan it backward 56 | */ 57 | scanForward?: boolean; 58 | /** 59 | * An optional filter expression for the 60 | * find operation 61 | */ 62 | filter?: CompositeCondition; 63 | 64 | /** 65 | * The range of children to retrieve (on the child's ID value) 66 | */ 67 | range?: KeyRangeExpression; 68 | }; 69 | 70 | /** 71 | * Find all the child objects of a root (top-level) object. 72 | * 73 | * The parent collection is determined by the reference in the 74 | * child collection. 75 | * 76 | * This method has a `nextToken`, which is used for pagination - it 77 | * is returned when there is more values to retrieve. You should 78 | * write your code to repeatedly call findChildren with the nextToken 79 | * value of the previous call until it comes back undefined in order 80 | * to retrieve all the values. 
81 | * 82 | * @category Query 83 | * 84 | * @param ctx the context 85 | * @param childCollectionName name of the child object collection 86 | * @param rootObjectId the `_id` of the root object 87 | * @param nextToken the next token from the previous call, or `undefined` if there are no more values 88 | * @param options the options to control the query 89 | * @param options.limit number of records to return 90 | * @param options.scanForward=true true for ascending index order 91 | * @param options.filter an optional filter expression to apply 92 | * @throws {CollectionNotFoundException} when the collection is not found in the context 93 | */ 94 | export async function findChildren( 95 | ctx: Context, 96 | childCollectionName: string, 97 | rootObjectId: string, 98 | nextToken?: Key, 99 | options: FindChildrenOptions = {} 100 | ): Promise> { 101 | const childCollection = getChildCollection(ctx, childCollectionName); 102 | const nameMapper = createNameMapper(); 103 | const valueMapper = createValueMapper(); 104 | 105 | const { 106 | parentCollectionName, 107 | layout: { 108 | primaryKey: { partitionKey, sortKey }, 109 | }, 110 | } = childCollection; 111 | 112 | const parentId = assemblePrimaryKeyValue( 113 | parentCollectionName, 114 | rootObjectId, 115 | childCollection.layout.indexKeySeparator 116 | ); 117 | 118 | const partitionKeyExpression = `${nameMapper.map( 119 | partitionKey 120 | )} = ${valueMapper.map(parentId)}`; 121 | let sortKeyExpression: string; 122 | if (options.range) { 123 | // for <, <=, >, >=, we can't use the builtin DynamoDB operators because they 124 | // will include items from other collections naturally. 
Instead, we construct 125 | // a barrier for highest and lowest and use the `BETWEEN` operator (similar 126 | // to findByIdWithChildren) 127 | switch (options.range.op) { 128 | case 'gt': { 129 | const childCollectionMin = assemblePrimaryKeyValue( 130 | childCollectionName, 131 | incrementLast(options.range.value), 132 | childCollection.layout.indexKeySeparator 133 | ); 134 | const childCollectionMax = assemblePrimaryKeyValue( 135 | childCollectionName, 136 | '\uFFFF', 137 | childCollection.layout.indexKeySeparator 138 | ); 139 | sortKeyExpression = `${nameMapper.map( 140 | sortKey 141 | )} BETWEEN ${valueMapper.map(childCollectionMin)} AND ${valueMapper.map( 142 | childCollectionMax 143 | )}`; 144 | break; 145 | } 146 | case 'lt': { 147 | const childCollectionMax = assemblePrimaryKeyValue( 148 | childCollectionName, 149 | decrementLast(options.range.value), 150 | childCollection.layout.indexKeySeparator 151 | ); 152 | const childCollectionMin = assemblePrimaryKeyValue( 153 | childCollectionName, 154 | '', 155 | childCollection.layout.indexKeySeparator 156 | ); 157 | sortKeyExpression = `${nameMapper.map( 158 | sortKey 159 | )} BETWEEN ${valueMapper.map(childCollectionMin)} AND ${valueMapper.map( 160 | childCollectionMax 161 | )}`; 162 | break; 163 | } 164 | case 'lte': { 165 | const childCollectionMax = assemblePrimaryKeyValue( 166 | childCollectionName, 167 | options.range.value, 168 | childCollection.layout.indexKeySeparator 169 | ); 170 | const childCollectionMin = assemblePrimaryKeyValue( 171 | childCollectionName, 172 | '', 173 | childCollection.layout.indexKeySeparator 174 | ); 175 | sortKeyExpression = `${nameMapper.map( 176 | sortKey 177 | )} BETWEEN ${valueMapper.map(childCollectionMin)} AND ${valueMapper.map( 178 | childCollectionMax 179 | )}`; 180 | break; 181 | } 182 | case 'gte': { 183 | const childCollectionMin = assemblePrimaryKeyValue( 184 | childCollectionName, 185 | options.range.value, 186 | childCollection.layout.indexKeySeparator 187 | ); 188 
| const childCollectionMax = assemblePrimaryKeyValue( 189 | childCollectionName, 190 | '\uFFFF', 191 | childCollection.layout.indexKeySeparator 192 | ); 193 | sortKeyExpression = `${nameMapper.map( 194 | sortKey 195 | )} BETWEEN ${valueMapper.map(childCollectionMin)} AND ${valueMapper.map( 196 | childCollectionMax 197 | )}`; 198 | break; 199 | } 200 | case 'begins_with': { 201 | const childCollectionValue = assemblePrimaryKeyValue( 202 | childCollectionName, 203 | options.range.value, 204 | childCollection.layout.indexKeySeparator 205 | ); 206 | sortKeyExpression = `begins_with(${nameMapper.map( 207 | sortKey 208 | )}, ${valueMapper.map(childCollectionValue)})`; 209 | break; 210 | } 211 | case 'between': { 212 | const childCollectionMin = assemblePrimaryKeyValue( 213 | childCollectionName, 214 | options.range.min, 215 | childCollection.layout.indexKeySeparator 216 | ); 217 | const childCollectionMax = assemblePrimaryKeyValue( 218 | childCollectionName, 219 | options.range.max, 220 | childCollection.layout.indexKeySeparator 221 | ); 222 | sortKeyExpression = `${nameMapper.map( 223 | sortKey 224 | )} BETWEEN ${valueMapper.map(childCollectionMin)} AND ${valueMapper.map( 225 | childCollectionMax 226 | )}`; 227 | break; 228 | } 229 | default: 230 | throw new InvalidRangeOperatorException( 231 | 'Unknown range operator', 232 | (options.range as KeyRangeExpression).op 233 | ); 234 | } 235 | } else { 236 | const childCollectionPrefix = assemblePrimaryKeyValue( 237 | childCollectionName, 238 | '', 239 | childCollection.layout.indexKeySeparator 240 | ); 241 | sortKeyExpression = `begins_with(${nameMapper.map( 242 | sortKey 243 | )}, ${valueMapper.map(childCollectionPrefix)})`; 244 | } 245 | const keyConditionExpression = `${partitionKeyExpression} AND ${sortKeyExpression}`; 246 | 247 | let filterExpression; 248 | if (options?.filter) { 249 | filterExpression = parseCompositeCondition(options.filter, { 250 | nameMapper, 251 | valueMapper, 252 | parsePath: [], 253 | }); 254 | 
} 255 | 256 | const request: QueryInput = { 257 | TableName: childCollection.layout.tableName, 258 | KeyConditionExpression: keyConditionExpression, 259 | ExpressionAttributeNames: nameMapper.get(), 260 | ExpressionAttributeValues: valueMapper.get(), 261 | ExclusiveStartKey: nextToken, 262 | Limit: options?.limit, 263 | ScanIndexForward: options?.scanForward ?? true, 264 | FilterExpression: filterExpression, 265 | }; 266 | 267 | debugDynamo('Query', request); 268 | const command = new QueryCommand(request); 269 | const results = await ctx.ddb.send(command); 270 | return { 271 | items: (results.Items || []).map((item) => 272 | unwrap(unmarshall(item) as WrappedDocument) 273 | ), 274 | nextToken: results.LastEvaluatedKey, 275 | }; 276 | } 277 | -------------------------------------------------------------------------------- /src/operations/index.ts: -------------------------------------------------------------------------------- 1 | export { findById } from './find_by_id'; 2 | export { insert } from './insert'; 3 | export { find } from './find'; 4 | export { deleteById } from './delete_by_id'; 5 | export { replace } from './replace'; 6 | export { findChildren } from './find_children'; 7 | export { findChildById } from './find_child_by_id'; 8 | export { deleteChildById } from './delete_child_by_id'; 9 | export { updateById } from './update_by_id'; 10 | export { updateChildById } from './update_child_by_id'; 11 | export { findByIdWithChildren } from './find_by_id_with_children'; 12 | export { batchFindByIds } from './batch_find_by_ids'; 13 | export { batchReplaceDelete } from './batch_replace_delete'; 14 | export { transactionWrite } from './transact_write'; 15 | -------------------------------------------------------------------------------- /src/operations/insert.test.ts: -------------------------------------------------------------------------------- 1 | import { CollectionLayout } from '../base/layout'; 2 | import { 3 | createDynamoMock, 4 | createDynamoMockError, 
5 | createAWSError, 6 | } from '../../testutil/dynamo_mock'; 7 | import { createContext } from '../context'; 8 | import { insert } from './insert'; 9 | import { 10 | ConflictException, 11 | InvalidIndexedFieldValueException, 12 | } from '../base/exceptions'; 13 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 14 | import { ChildCollection, RootCollection } from '../base/collection'; 15 | 16 | jest.mock('../base/new_id', () => ({ 17 | __esModule: true, 18 | default: jest.fn().mockImplementation(() => 'test-id'), 19 | })); 20 | 21 | describe('insert', () => { 22 | const layout: CollectionLayout = { 23 | tableName: 'my-objects', 24 | primaryKey: { partitionKey: 'pkey', sortKey: 'skey' }, 25 | }; 26 | const collection: RootCollection = { 27 | name: 'users', 28 | layout, 29 | }; 30 | 31 | const collectionWithRequiredPaths: RootCollection = { 32 | name: 'users', 33 | layout: { 34 | ...layout, 35 | findKeys: [ 36 | { indexName: 'index1', partitionKey: 'gpk1', sortKey: 'gsk1' }, 37 | ], 38 | }, 39 | accessPatterns: [ 40 | { 41 | indexName: 'index1', 42 | partitionKeys: [['email']], 43 | sortKeys: [['location']], 44 | requiredPaths: [['email'], ['location']], 45 | }, 46 | ], 47 | }; 48 | 49 | const childCollection: ChildCollection = { 50 | name: 'addresses', 51 | type: 'child', 52 | layout, 53 | parentCollectionName: 'users', 54 | foreignKeyPath: ['userId'], 55 | }; 56 | 57 | test('should insert a root item conditionally', async () => { 58 | const ddb = createDynamoMock('putItem', {}); 59 | const context = createContext(ddb as unknown as DynamoDBClient, [ 60 | collection, 61 | childCollection, 62 | ]); 63 | 64 | const value = { name: 'Chris', email: 'chris@example.com' }; 65 | const result = await insert(context, 'users', value); 66 | expect(result).toHaveProperty('_id'); 67 | 68 | const request = ddb.send.mock.calls[0][0].input; 69 | expect(request.TableName).toBe('my-objects'); 70 | expect(request.Item).toBeDefined(); 71 | 
expect(request.ConditionExpression).toMatch('attribute_not_exists'); 72 | expect(request.ExpressionAttributeNames['#idAttribute']).toBe( 73 | 'users|-|test-id' 74 | ); 75 | }); 76 | 77 | test('should insert a root item with required paths', async () => { 78 | const ddb = createDynamoMock('putItem', {}); 79 | const context = createContext(ddb as unknown as DynamoDBClient, [ 80 | collectionWithRequiredPaths, 81 | ]); 82 | 83 | const value = { 84 | name: 'name', 85 | email: 'test@example.com', 86 | location: 'Wonderland', 87 | }; 88 | const result = await insert(context, 'users', value); 89 | expect(result).toHaveProperty('_id'); 90 | 91 | const request = ddb.send.mock.calls[0][0].input; 92 | expect(request).toEqual({ 93 | TableName: 'my-objects', 94 | Item: { 95 | pkey: { S: 'users|-|test-id' }, 96 | skey: { S: 'users|-|test-id' }, 97 | value: { 98 | M: { 99 | name: { S: 'name' }, 100 | email: { S: 'test@example.com' }, 101 | location: { S: 'Wonderland' }, 102 | _id: { S: 'test-id' }, 103 | }, 104 | }, 105 | type: { S: 'users' }, 106 | gpk1: { S: 'users|-|test@example.com' }, 107 | gsk1: { S: 'users|-|Wonderland' }, 108 | }, 109 | ReturnValues: 'NONE', 110 | ConditionExpression: 'attribute_not_exists(#idAttribute)', 111 | ExpressionAttributeNames: { '#idAttribute': 'users|-|test-id' }, 112 | }); 113 | }); 114 | 115 | test('should throw if required path is missing (pk)', async () => { 116 | const ddb = createDynamoMock('putItem', {}); 117 | const context = createContext(ddb as unknown as DynamoDBClient, [ 118 | collectionWithRequiredPaths, 119 | ]); 120 | 121 | const value = { 122 | // email is required 123 | name: 'name', 124 | location: 'Wonderland', 125 | }; 126 | await expect(() => insert(context, 'users', value)).rejects.toThrow( 127 | InvalidIndexedFieldValueException 128 | ); 129 | }); 130 | 131 | test('should throw if required path is missing (sk)', async () => { 132 | const ddb = createDynamoMock('putItem', {}); 133 | const context = createContext(ddb as 
unknown as DynamoDBClient, [ 134 | collectionWithRequiredPaths, 135 | ]); 136 | 137 | const value = { 138 | // location is required 139 | name: 'name', 140 | email: 'test@example.com', 141 | }; 142 | await expect(() => insert(context, 'users', value)).rejects.toThrow( 143 | InvalidIndexedFieldValueException 144 | ); 145 | }); 146 | 147 | test('should work with custom separators', async () => { 148 | const ddb = createDynamoMock('putItem', {}); 149 | const rootCollection = { 150 | ...collection, 151 | layout: { ...layout, indexKeySeparator: '#' }, 152 | }; 153 | const context = createContext(ddb as unknown as DynamoDBClient, [ 154 | rootCollection, 155 | ]); 156 | 157 | const value = { name: 'Chris', email: 'chris@example.com' }; 158 | const result = await insert(context, 'users', value); 159 | expect(result).toHaveProperty('_id'); 160 | 161 | const request = ddb.send.mock.calls[0][0].input; 162 | expect(request.TableName).toBe('my-objects'); 163 | expect(request.Item).toBeDefined(); 164 | expect(request.ConditionExpression).toMatch('attribute_not_exists'); 165 | expect(request.ExpressionAttributeNames['#idAttribute']).toBe( 166 | 'users#test-id' 167 | ); 168 | }); 169 | 170 | test('should insert a child item conditionally', async () => { 171 | const ddb = createDynamoMock('putItem', {}); 172 | const context = createContext(ddb as unknown as DynamoDBClient, [ 173 | collection, 174 | childCollection, 175 | ]); 176 | 177 | const value = { 178 | firstLine: '80 Place St', 179 | suburb: 'Town', 180 | country: 'UK', 181 | userId: 'user-id-1', 182 | }; 183 | const result = await insert(context, 'addresses', value); 184 | expect(result).toHaveProperty('_id'); 185 | 186 | const request = ddb.send.mock.calls[0][0].input; 187 | expect(request.TableName).toBe('my-objects'); 188 | expect(request.Item).toBeDefined(); 189 | expect(request.ConditionExpression).toMatch('attribute_not_exists'); 190 | expect(request.ExpressionAttributeNames['#parentIdAttribute']).toBe( 191 | 
'users|-|user-id-1' 192 | ); 193 | expect(request.ExpressionAttributeNames['#childIdAttribute']).toBe( 194 | 'addresses|-|test-id' 195 | ); 196 | }); 197 | 198 | test('should wrap and throw an exception if the item already exists', async () => { 199 | const ddb = createDynamoMockError( 200 | 'putItem', 201 | createAWSError( 202 | 'ConditionalCheckFailedException', 203 | 'The conditional check failed' 204 | ) 205 | ); 206 | const context = createContext(ddb as unknown as DynamoDBClient, [ 207 | collection, 208 | ]); 209 | 210 | const value = { _id: 'test-id', name: 'Chris', email: 'chris@example.com' }; 211 | expect(insert(context, 'users', value)).rejects.toThrowError( 212 | ConflictException 213 | ); 214 | }); 215 | }); 216 | -------------------------------------------------------------------------------- /src/operations/insert.ts: -------------------------------------------------------------------------------- 1 | import { Context } from '../context'; 2 | import { 3 | getCollection, 4 | toWrapped, 5 | assemblePrimaryKeyValue, 6 | } from '../base/util'; 7 | import { ConflictException } from '../base/exceptions'; 8 | import get from 'lodash/get'; 9 | import { DocumentWithId } from '../base/common'; 10 | import debugDynamo from '../debug/debugDynamo'; 11 | import { PutItemCommand, PutItemInput } from '@aws-sdk/client-dynamodb'; 12 | import { marshall } from '@aws-sdk/util-dynamodb'; 13 | 14 | /** 15 | * Insert a value into a collection. Adds an _id field to the value 16 | * if one is not provided using the `bson` ID generator (similar to 17 | * MongoDB IDs). 
18 | * 19 | * @category Mutation 20 | * 21 | * @param context the context to use 22 | * @param collectionName name of the collection 23 | * @param value value to insert 24 | * @returns a copy of the inserted value, with appended `_id` field if not provided 25 | * @throws {ConflictException} when an item with the same _id already exists 26 | */ 27 | export async function insert( 28 | context: Context, 29 | collectionName: string, 30 | value: Record 31 | ): Promise { 32 | const collection = getCollection(context, collectionName); 33 | const wrapped = toWrapped(collection, value); 34 | let request: PutItemInput; 35 | if (collection.type === 'child') { 36 | request = { 37 | TableName: collection.layout.tableName, 38 | Item: marshall(wrapped, { convertEmptyValues: false, removeUndefinedValues: true }), 39 | ReturnValues: 'NONE', 40 | ConditionExpression: 41 | 'attribute_not_exists(#parentIdAttribute) and attribute_not_exists(#childIdAttribute)', 42 | ExpressionAttributeNames: { 43 | '#parentIdAttribute': assemblePrimaryKeyValue( 44 | collection.parentCollectionName, 45 | get(value, collection.foreignKeyPath), 46 | collection.layout.indexKeySeparator 47 | ), 48 | '#childIdAttribute': assemblePrimaryKeyValue( 49 | collection.name, 50 | wrapped.value._id, 51 | collection.layout.indexKeySeparator 52 | ), 53 | }, 54 | }; 55 | } else { 56 | request = { 57 | TableName: collection.layout.tableName, 58 | Item: marshall(wrapped, { convertEmptyValues: false, removeUndefinedValues: true }), 59 | ReturnValues: 'NONE', 60 | ConditionExpression: 'attribute_not_exists(#idAttribute)', 61 | ExpressionAttributeNames: { 62 | '#idAttribute': assemblePrimaryKeyValue( 63 | collection.name, 64 | wrapped.value._id, 65 | collection.layout.indexKeySeparator 66 | ), 67 | }, 68 | }; 69 | } 70 | try { 71 | debugDynamo('PutItem', request); 72 | const command = new PutItemCommand(request); 73 | await context.ddb.send(command); 74 | } catch (error) { 75 | if ((error as Error).name === 
'ConditionalCheckFailedException') { 76 | throw new ConflictException( 77 | 'An item with this _id already exists', 78 | wrapped.value._id 79 | ); 80 | } 81 | throw error; 82 | } 83 | return wrapped.value; 84 | } 85 | -------------------------------------------------------------------------------- /src/operations/replace.test.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; 2 | import { CollectionLayout } from '../base/layout'; 3 | import { 4 | createDynamoMock, 5 | } from '../../testutil/dynamo_mock'; 6 | import { createContext } from '../context'; 7 | import { replace } from './replace'; 8 | import { RootCollection } from '../base/collection'; 9 | import { InvalidIndexedFieldValueException } from '../base/exceptions'; 10 | 11 | describe('replace', () => { 12 | const layout: CollectionLayout = { 13 | tableName: 'my-objects', 14 | primaryKey: { partitionKey: 'pkey', sortKey: 'skey' }, 15 | }; 16 | const collection = { 17 | name: 'users', 18 | layout, 19 | }; 20 | 21 | const collectionWithRequiredPaths: RootCollection = { 22 | name: 'users', 23 | layout: { 24 | ...layout, 25 | findKeys: [ 26 | { indexName: 'index1', partitionKey: 'gpk1', sortKey: 'gsk1' }, 27 | ], 28 | }, 29 | accessPatterns: [ 30 | { 31 | indexName: 'index1', 32 | partitionKeys: [['email']], 33 | sortKeys: [['location']], 34 | requiredPaths: [['email'], ['location']], 35 | }, 36 | ], 37 | }; 38 | 39 | test('should replace an item conditionally', async () => { 40 | const ddb = createDynamoMock('putItem', {}); 41 | const context = createContext(ddb as unknown as DynamoDBClient, [ 42 | collection, 43 | ]); 44 | 45 | const value = { _id: 'test-id', name: 'Chris', email: 'chris@example.com' }; 46 | const result = await replace(context, 'users', value); 47 | expect(result).toHaveProperty('_id'); 48 | 49 | const request = ddb.send.mock.calls[0][0].input; 50 | expect(request.TableName).toBe('my-objects'); 51 
| expect(request.Item).toBeDefined(); 52 | }); 53 | 54 | test('should insert a root item with required paths', async () => { 55 | const ddb = createDynamoMock('putItem', {}); 56 | const context = createContext(ddb as unknown as DynamoDBClient, [ 57 | collectionWithRequiredPaths, 58 | ]); 59 | 60 | const value = { 61 | _id: 'test-id', 62 | name: 'name', 63 | email: 'test@example.com', 64 | location: 'Wonderland', 65 | }; 66 | const result = await replace(context, 'users', value); 67 | expect(result).toHaveProperty('_id'); 68 | 69 | const request = ddb.send.mock.calls[0][0].input; 70 | expect(request).toEqual({ 71 | TableName: 'my-objects', 72 | Item: { 73 | pkey: { S: 'users|-|test-id' }, 74 | skey: { S: 'users|-|test-id' }, 75 | value: { 76 | M: { 77 | name: { S: 'name' }, 78 | email: { S: 'test@example.com' }, 79 | location: { S: 'Wonderland' }, 80 | _id: { S: 'test-id' }, 81 | }, 82 | }, 83 | type: { S: 'users' }, 84 | gpk1: { S: 'users|-|test@example.com' }, 85 | gsk1: { S: 'users|-|Wonderland' }, 86 | }, 87 | ReturnValues: 'NONE', 88 | }); 89 | }); 90 | 91 | test('should throw if required path is missing (pk)', async () => { 92 | const ddb = createDynamoMock('putItem', {}); 93 | const context = createContext(ddb as unknown as DynamoDBClient, [ 94 | collectionWithRequiredPaths, 95 | ]); 96 | 97 | const value = { 98 | _id: 'test-id', 99 | // email is required 100 | name: 'name', 101 | location: 'Wonderland', 102 | }; 103 | await expect(() => replace(context, 'users', value)).rejects.toThrow( 104 | InvalidIndexedFieldValueException 105 | ); 106 | }); 107 | 108 | test('should throw if required path is missing (sk)', async () => { 109 | const ddb = createDynamoMock('putItem', {}); 110 | const context = createContext(ddb as unknown as DynamoDBClient, [ 111 | collectionWithRequiredPaths, 112 | ]); 113 | 114 | const value = { 115 | _id: 'test-id', 116 | // location is required 117 | name: 'name', 118 | email: 'test@example.com', 119 | }; 120 | await expect(() => 
replace(context, 'users', value)).rejects.toThrow( 121 | InvalidIndexedFieldValueException 122 | ); 123 | }); 124 | }); 125 | -------------------------------------------------------------------------------- /src/operations/replace.ts: -------------------------------------------------------------------------------- 1 | import { PutItemCommand, PutItemInput } from '@aws-sdk/client-dynamodb'; 2 | import { marshall } from '@aws-sdk/util-dynamodb'; 3 | import { Context } from '../context'; 4 | import { toWrapped, getCollection } from '../base/util'; 5 | import { DocumentWithId, WrappedDocument } from '../base/common'; 6 | import { createNameMapper, createValueMapper } from '../base/mappers'; 7 | import debugDynamo from '../debug/debugDynamo'; 8 | import { CompositeCondition } from '../base/conditions'; 9 | import { parseCompositeCondition } from '../base/conditions_parser'; 10 | 11 | /** 12 | * Insert or replace a value in a collection. 13 | * 14 | * This operation differs from [[insert]] in that it does not check for the existence 15 | * of a document with the same `_id` value - it will replace whatever is there. 
16 | * 17 | * @category Mutation 18 | * 19 | * @param context the context 20 | * @param collectionName the collection to update 21 | * @param value the document to insert or replace 22 | * @param options options to apply 23 | * @param options.condition an optional conditional expression that must be satisfied for the update to proceed 24 | * @returns the inserted / replaced value 25 | * @throws {CollectionNotFoundException} when the collection is not found 26 | */ 27 | export async function replace( 28 | context: Context, 29 | collectionName: string, 30 | value: Record, 31 | options: { condition?: CompositeCondition } = {} 32 | ): Promise { 33 | const { request, wrapped } = createReplaceByIdRequest( 34 | context, 35 | collectionName, 36 | value, 37 | options 38 | ); 39 | 40 | debugDynamo('PutItem', request); 41 | const command = new PutItemCommand(request); 42 | await context.ddb.send(command); 43 | return (wrapped as WrappedDocument).value; 44 | } 45 | 46 | /** 47 | * Create a request for Insert or Replace request 48 | * 49 | * This operation differs from [[insert]] in that it does not check for the existence 50 | * of a document with the same `_id` value - it will replace whatever is there. 
51 | * 52 | * @category Mutation 53 | * 54 | * @param context the context 55 | * @param collectionName the collection to update 56 | * @param value the document to insert or replace 57 | * @param options options to apply 58 | * @param options.condition an optional conditional expression that must be satisfied for the update to proceed 59 | * @returns the inserted / replaced request @see {PutItemInput} 60 | * @throws {CollectionNotFoundException} when the collection is not found 61 | */ 62 | export const createReplaceByIdRequest = ( 63 | context: Context, 64 | collectionName: string, 65 | value: Record, 66 | options: { condition?: CompositeCondition } = {} 67 | ): { request: PutItemInput; wrapped: WrappedDocument } => { 68 | const collection = getCollection(context, collectionName); 69 | const wrapped = toWrapped(collection, value); 70 | 71 | let conditionExpression; 72 | const nameMapper = createNameMapper(); 73 | const valueMapper = createValueMapper(); 74 | if (options.condition) { 75 | conditionExpression = parseCompositeCondition(options.condition, { 76 | nameMapper, 77 | valueMapper, 78 | parsePath: [], 79 | }); 80 | } 81 | 82 | const item = marshall(wrapped, { 83 | convertEmptyValues: false, 84 | removeUndefinedValues: true, 85 | }); 86 | const request: PutItemInput = { 87 | TableName: collection.layout.tableName, 88 | Item: item, 89 | ReturnValues: 'NONE', 90 | ConditionExpression: conditionExpression, 91 | ExpressionAttributeNames: nameMapper.get(), 92 | ExpressionAttributeValues: valueMapper.get(), 93 | }; 94 | 95 | return { request, wrapped }; 96 | }; 97 | -------------------------------------------------------------------------------- /src/operations/transact_find_by_ids.ts: -------------------------------------------------------------------------------- 1 | import { 2 | TransactGetItem, 3 | TransactGetItemsCommand, 4 | } from '@aws-sdk/client-dynamodb'; 5 | import { convertToAttr, unmarshall } from '@aws-sdk/util-dynamodb'; 6 | import { Context } from 
'../context'; 7 | import { InvalidFindDescriptorException } from '../base/exceptions'; 8 | import { 9 | getChildCollection, 10 | getRootCollection, 11 | assemblePrimaryKeyValue, 12 | unwrap, 13 | } from '../base/util'; 14 | import { DocumentWithId, WrappedDocument } from '../base/common'; 15 | import debugDynamo from '../debug/debugDynamo'; 16 | import { ChildCollection } from '../base/collection'; 17 | 18 | /** 19 | * The collection and ID of a root or child 20 | * item to retrieve with #transactFindByIds 21 | */ 22 | export type TransactFindByIdDescriptor = { 23 | /** The collection containing the item */ 24 | collection: string; 25 | /* The ID of the item */ 26 | id: string; 27 | /* The parent ID of the item (if a child item) */ 28 | rootId?: string; 29 | }; 30 | 31 | export const transactFindByIds = async ( 32 | ctx: Context, 33 | items: TransactFindByIdDescriptor[] 34 | ): Promise<(DocumentType | null)[]> => { 35 | if (items.length === 0) { 36 | throw new InvalidFindDescriptorException( 37 | 'At least one find descriptor must be specified' 38 | ); 39 | } else if (items.length > 25) { 40 | throw new InvalidFindDescriptorException( 41 | 'No more than 25 find descriptors can be specified to transactFindByIds' 42 | ); 43 | } 44 | const transactGetItems: TransactGetItem[] = items.map( 45 | ({ collection, id, rootId }) => { 46 | const collectionDefinition = rootId 47 | ? getChildCollection(ctx, collection) 48 | : getRootCollection(ctx, collection); 49 | const { 50 | layout: { tableName, primaryKey, indexKeySeparator }, 51 | } = collectionDefinition; 52 | return { 53 | Get: { 54 | TableName: tableName, 55 | Key: { 56 | [primaryKey.partitionKey]: convertToAttr( 57 | assemblePrimaryKeyValue( 58 | rootId 59 | ? (collectionDefinition as ChildCollection) 60 | .parentCollectionName 61 | : collectionDefinition.name, 62 | rootId ? 
rootId : id, 63 | indexKeySeparator 64 | ), 65 | { convertEmptyValues: false } 66 | ), 67 | [primaryKey.sortKey]: convertToAttr( 68 | assemblePrimaryKeyValue(collection, id, indexKeySeparator), 69 | { convertEmptyValues: false } 70 | ), 71 | }, 72 | }, 73 | }; 74 | } 75 | ); 76 | 77 | const request = { TransactItems: transactGetItems }; 78 | debugDynamo('TransactGetItems', request); 79 | const command = new TransactGetItemsCommand(request); 80 | const { Responses = [] } = await ctx.ddb.send(command); 81 | 82 | const returnedItems = []; 83 | for (const response of Responses) { 84 | if (response.Item) { 85 | const unmarshalled = unmarshall(response.Item); 86 | const item = unwrap(unmarshalled as WrappedDocument); 87 | returnedItems.push(item); 88 | } 89 | } 90 | return returnedItems; 91 | }; 92 | -------------------------------------------------------------------------------- /src/operations/transact_write.ts: -------------------------------------------------------------------------------- 1 | import { 2 | TransactionCanceledException as DDBTransactionCanceledException, 3 | ReturnConsumedCapacity, 4 | ReturnItemCollectionMetrics, 5 | TransactWriteItem, 6 | TransactWriteItemsCommand, 7 | TransactWriteItemsCommandOutput, 8 | } from '@aws-sdk/client-dynamodb'; 9 | import { isEmpty } from 'lodash'; 10 | import createDebug from 'debug'; 11 | import { CompositeCondition } from '../base/conditions'; 12 | import { 13 | IdempotentParameterMismatchException, 14 | InvalidArgumentException, 15 | InvalidFindDescriptorException, 16 | TransactionCanceledException, 17 | TransactionConflictException, 18 | TransactionInProgressException, 19 | TransactionValidationException, 20 | } from '../base/exceptions'; 21 | import { Context } from '../context'; 22 | import debugDynamo from '../debug/debugDynamo'; 23 | import { createDeleteByIdRequest } from './delete_by_id'; 24 | import { createReplaceByIdRequest } from './replace'; 25 | import { coerceError } from '../base/coerce_error'; 26 | 
import { createDeleteChildByIdRequest } from './delete_child_by_id'; 27 | 28 | const debug = createDebug('dynaglue:transact:write'); 29 | 30 | /** 31 | * A replace request, it helps dynaglue to identify action as Put, 32 | * required to add to the list of actions in a transaction 33 | * 34 | * @param collectionName the collection to update 35 | * @param value the document to insert or replace 36 | * @param options options to apply 37 | * @param options.condition an optional conditional expression that must be satisfied for the update to proceed 38 | */ 39 | export type TransactionReplaceRequest = { 40 | type: 'replace'; 41 | collectionName: string; 42 | value: Record; 43 | options?: { condition?: CompositeCondition }; 44 | }; 45 | 46 | /** 47 | * A delete request, it helps dynaglue to identify action as Delete, 48 | * required to add to the list of actions in a transaction 49 | * 50 | * @param collectionName the collection to update 51 | * @param id the document to delete 52 | * @param options options to apply 53 | * @param options.condition an optional conditional expression that must be satisfied for the update to proceed 54 | */ 55 | export type TransactionDeleteRequest = { 56 | type: 'delete'; 57 | collectionName: string; 58 | id: string; 59 | options?: { condition?: CompositeCondition }; 60 | }; 61 | 62 | /** 63 | * A delete request for child, it helps dynaglue to identify action as Delete but for a child collection, 64 | * required to add to the list of actions in a transaction 65 | * 66 | * @param collectionName the collection to update 67 | * @param id the document to delete 68 | * @param rootObjectId parent object id 69 | * @param options options to apply 70 | * @param options.condition an optional conditional expression that must be satisfied for the update to proceed 71 | */ 72 | export type TransactionDeleteChildRequest = { 73 | type: 'delete-child'; 74 | collectionName: string; 75 | id: string; 76 | rootObjectId: string; 77 | options?: { condition?: 
CompositeCondition }; 78 | }; 79 | 80 | /** 81 | * TransactionWrite can be combination of Replace (Insert or Replace), Delete or Delete a child operation 82 | */ 83 | export type TransactionWriteRequest = 84 | | TransactionReplaceRequest 85 | | TransactionDeleteRequest 86 | | TransactionDeleteChildRequest; 87 | 88 | /** 89 | * check to confirmation request is for Replace operation 90 | * @param transactionWriteRequest 91 | * @returns 92 | */ 93 | const isTransactionReplaceRequest = ( 94 | transactionWriteRequest: TransactionWriteRequest 95 | ): transactionWriteRequest is TransactionReplaceRequest => 96 | transactionWriteRequest.type === 'replace'; 97 | 98 | /** 99 | * check to confirmation request is for Delete operation 100 | * @param transactionWriteRequest 101 | * @returns 102 | */ 103 | const isTransactionDeleteRequest = ( 104 | transactionWriteRequest: TransactionWriteRequest 105 | ): transactionWriteRequest is TransactionDeleteRequest => 106 | transactionWriteRequest.type === 'delete'; 107 | 108 | /** 109 | * check to confirmation request is for child Delete operation 110 | * @param transactionWriteRequest 111 | * @returns 112 | */ 113 | const isTransactionDeleteChildRequest = ( 114 | transactionWriteRequest: TransactionWriteRequest 115 | ): transactionWriteRequest is TransactionDeleteChildRequest => 116 | transactionWriteRequest.type === 'delete-child'; 117 | 118 | /** 119 | * This operation writes to DynamoDB in a transaction. 
120 | * A transaction can contain upto 25 operations (Insert, Replace or Delete) 121 | * 122 | * @category Mutation 123 | * 124 | * @param context 125 | * @param transactionWriteRequests 126 | * @throws {TransactionCanceledException} 127 | * @throws {TransactionConflictException} 128 | */ 129 | export const transactionWrite = async ( 130 | context: Context, 131 | transactionWriteRequests: TransactionWriteRequest[], 132 | options: { 133 | ReturnConsumedCapacity?: ReturnConsumedCapacity; 134 | ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics; 135 | ClientRequestToken?: string; 136 | } = {} 137 | ): Promise => { 138 | if (isEmpty(transactionWriteRequests)) { 139 | throw new InvalidArgumentException( 140 | 'At least one request should be provided' 141 | ); 142 | } else if (transactionWriteRequests.length > 100) { 143 | throw new InvalidFindDescriptorException( 144 | 'No more than 100 requests can be specified to transactionWrite' 145 | ); 146 | } 147 | 148 | const transactWriteItem: TransactWriteItem[] = 149 | transactionWriteRequests.reduce( 150 | (result: TransactWriteItem[], request: TransactionWriteRequest) => { 151 | /** Checks and create a REPLACE request for a requested item */ 152 | if (isTransactionReplaceRequest(request)) { 153 | const { collectionName, value, options } = request; 154 | const { request: putItemInput } = createReplaceByIdRequest( 155 | context, 156 | collectionName, 157 | value, 158 | options 159 | ); 160 | 161 | result.push({ Put: putItemInput }); 162 | } 163 | 164 | /** Checks and create a DELETE request for a requested item */ 165 | if (isTransactionDeleteRequest(request)) { 166 | const { collectionName, id, options } = request; 167 | 168 | const deleteItem = createDeleteByIdRequest( 169 | context, 170 | collectionName, 171 | id, 172 | options 173 | ); 174 | 175 | result.push({ Delete: deleteItem }); 176 | } 177 | 178 | /** Checks and create a DELETE Child request for a requested item */ 179 | if 
(isTransactionDeleteChildRequest(request)) { 180 | const { collectionName, id, rootObjectId, options } = request; 181 | 182 | const deleteItem = createDeleteChildByIdRequest( 183 | context, 184 | collectionName, 185 | id, 186 | rootObjectId, 187 | options 188 | ); 189 | 190 | result.push({ Delete: deleteItem }); 191 | } 192 | 193 | return result; 194 | }, 195 | [] 196 | ); 197 | 198 | try { 199 | const request = { TransactItems: transactWriteItem, ...options }; 200 | 201 | debugDynamo('TransactWriteItems', JSON.stringify(request)); 202 | 203 | const command = new TransactWriteItemsCommand(request); 204 | 205 | return await context.ddb.send(command); 206 | } catch (error) { 207 | const errObject = coerceError(error); 208 | debug('transact_write: error', error); 209 | 210 | if (errObject.name === 'ValidationException') { 211 | throw new TransactionValidationException( 212 | 'Multiple operations are included for same item id' 213 | ); 214 | } 215 | if ( 216 | (errObject).name === 217 | 'TransactionCanceledException' 218 | ) { 219 | throw new TransactionCanceledException( 220 | 'The entire transaction request was canceled', 221 | { 222 | cancellationReasons: (error as DDBTransactionCanceledException) 223 | .CancellationReasons, 224 | } 225 | ); 226 | } 227 | 228 | if ( 229 | (errObject).name === 230 | 'TransactionConflictException' 231 | ) { 232 | throw new TransactionConflictException( 233 | 'Another transaction or request is in progress for one of the requested item' 234 | ); 235 | } 236 | if ( 237 | (errObject).name === 238 | 'IdempotentParameterMismatchException' 239 | ) { 240 | throw new IdempotentParameterMismatchException( 241 | 'Another transaction or request with same client token', 242 | { clientRequestToken: options.ClientRequestToken } 243 | ); 244 | } 245 | if ( 246 | (errObject).name === 247 | 'TransactionInProgressException' 248 | ) { 249 | throw new TransactionInProgressException( 250 | 'Transaction is in progress with same client token', 251 | { 
clientRequestToken: options.ClientRequestToken } 252 | ); 253 | } 254 | throw error; 255 | } 256 | }; 257 | -------------------------------------------------------------------------------- /src/operations/update_child_by_id.ts: -------------------------------------------------------------------------------- 1 | import { Context } from '../context'; 2 | import { Updates, updateInternal } from './update_by_id'; 3 | import { DocumentWithId } from '../base/common'; 4 | import { getChildCollection, assemblePrimaryKeyValue } from '../base/util'; 5 | import { CompositeCondition } from '../base/conditions'; 6 | 7 | /** 8 | * Update a child document using its `_id` and parent `_id`. 9 | * 10 | * This operation allows you to do a partial update of a collection document i.e. without 11 | * specifying all the values (it uses DynamoDB`s `UpdateItem` operation). 12 | * 13 | * At this time, the `updates` value just updates specified key paths on the target document. 14 | * 15 | * If some of the update key paths are indexed values, the indexes will also be updated. Because 16 | * of this, you must specify all the key values in an access pattern to ensure indexes are 17 | * updated consistently. 18 | * 19 | * @category Mutation 20 | * 21 | * @param context the context 22 | * @param collectionName the collection to update 23 | * @param objectId the _id value of the object to update 24 | * @param parentObjectId: the _id value of the parent object 25 | * @param updates the set of updates to apply. 26 | * @param options options to apply 27 | * @param options.condition an optional conditional expression that must be satifisfied for the update to proceed 28 | * @returns the updated object value in its entirety. 
29 | * @throws {@link CollectionNotFoundException} collection not found 30 | * @throws {@link InvalidUpdatesException} thrown when the updates object is invalid or incomplete 31 | * @throws {@link InvalidUpdateValueException} thrown when one of the update values is an invalid type 32 | */ 33 | export async function updateChildById( 34 | context: Context, 35 | collectionName: string, 36 | objectId: string, 37 | parentObjectId: string, 38 | updates: Updates, 39 | options: { condition?: CompositeCondition } = {} 40 | ): Promise { 41 | const collection = getChildCollection(context, collectionName); 42 | 43 | const key = { 44 | [collection.layout.primaryKey.partitionKey]: { 45 | S: assemblePrimaryKeyValue( 46 | collection.parentCollectionName, 47 | parentObjectId, 48 | collection.layout.indexKeySeparator 49 | ), 50 | }, 51 | [collection.layout.primaryKey.sortKey]: { 52 | S: assemblePrimaryKeyValue( 53 | collectionName, 54 | objectId, 55 | collection.layout.indexKeySeparator 56 | ), 57 | }, 58 | }; 59 | return updateInternal(context, collection, key, updates, options); 60 | } 61 | -------------------------------------------------------------------------------- /testutil/debug_tests.ts: -------------------------------------------------------------------------------- 1 | import debug from 'debug'; 2 | 3 | export const DebugTestsNamespace = 'dynaglue:dynamodb:test'; 4 | 5 | /** @internal */ 6 | const logger = debug(DebugTestsNamespace); 7 | 8 | /** 9 | * @internal 10 | * 11 | * Helper for logging dynamo requests 12 | */ 13 | export const debugTests = (message: string, data: unknown): void => { 14 | logger('%s: %O', message, data); 15 | }; 16 | -------------------------------------------------------------------------------- /testutil/dynamo_mock.ts: -------------------------------------------------------------------------------- 1 | export type DynamoMock = { 2 | [key: string]: jest.Mock<{ promise: jest.Mock> }>; 3 | }; 4 | 5 | export const createAWSError = (code: string, 
message: string): Error => 6 | Object.assign(new Error(message), { name: code }); 7 | 8 | export function createDynamoMock( 9 | _: string, // methodName 10 | returnValue: Record 11 | ): DynamoMock { 12 | const mockFunction = jest.fn().mockResolvedValue(returnValue); 13 | return { 14 | send: mockFunction, 15 | }; 16 | } 17 | 18 | export function createDynamoMockError( 19 | methodName: string, 20 | error: Error 21 | ): DynamoMock { 22 | return { 23 | send: jest.fn().mockRejectedValue(Object.assign(error, { methodName })), 24 | }; 25 | } 26 | -------------------------------------------------------------------------------- /testutil/local_dynamo_db.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CreateTableCommand, 3 | CreateTableInput, 4 | DeleteTableCommand, 5 | DynamoDBClient, 6 | ListTablesCommand, 7 | } from '@aws-sdk/client-dynamodb'; 8 | import { debugTests } from './debug_tests'; 9 | 10 | const showTimeTaken = (startTime: number) => 11 | `[${new Date().getTime() - startTime}ms]`; 12 | 13 | const LocalDDBTestKit = { 14 | connect: (): DynamoDBClient | null => { 15 | const startBy = new Date().getTime(); 16 | try { 17 | const localDDBClient = new DynamoDBClient({ 18 | endpoint: 'http://localhost:8000', 19 | region: 'local', 20 | }); 21 | debugTests(`${showTimeTaken(startBy)} Connected to Local DDB`, ''); 22 | return localDDBClient; 23 | } catch (error) { 24 | debugTests('Error connecting to local DDB', error); 25 | return null; 26 | } 27 | }, 28 | createTables: async ( 29 | client: DynamoDBClient, 30 | tableDefinitions: CreateTableInput[] = [] 31 | ): Promise => { 32 | const startBy = new Date().getTime(); 33 | try { 34 | await Promise.all( 35 | tableDefinitions?.map((tableDefinition) => { 36 | const createTableCmd = new CreateTableCommand(tableDefinition); 37 | return client.send(createTableCmd); 38 | }) 39 | ); 40 | 41 | debugTests(`${showTimeTaken(startBy)} tables created in local DDB`, ''); 42 | } catch 
(error) { 43 | debugTests('Error creating tables in local DDB', error); 44 | } 45 | }, 46 | deleteTables: async ( 47 | client: DynamoDBClient, 48 | tableNames: string[] = [] 49 | ): Promise => { 50 | const startBy = new Date().getTime(); 51 | try { 52 | await Promise.all( 53 | tableNames?.map((tableName) => { 54 | return client.send( 55 | new DeleteTableCommand({ 56 | TableName: tableName, 57 | }) 58 | ); 59 | }) 60 | ); 61 | 62 | debugTests(`${showTimeTaken(startBy)} tables deleted in local DDB`, ''); 63 | } catch (error) { 64 | debugTests('Error deleting tables in local DDB', error); 65 | } 66 | }, 67 | listTables: async (client: DynamoDBClient): Promise => { 68 | try { 69 | await client.send(new ListTablesCommand({})); 70 | } catch (error) { 71 | debugTests('Error listing tables in local DDB', error); 72 | } 73 | }, 74 | }; 75 | 76 | export default LocalDDBTestKit; 77 | -------------------------------------------------------------------------------- /testutil/setupBeforeEnv.js: -------------------------------------------------------------------------------- 1 | import { setup } from 'jest-dynalite'; 2 | import path from 'path'; 3 | 4 | setup(path.join(__dirname, '..')); 5 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "include": ["src/**/*.ts"], 4 | "exclude": ["node_modules", "dist"], 5 | "compilerOptions": { 6 | /* Basic Options */ 7 | // "incremental": true, /* Enable incremental compilation */ 8 | "target": "es2022", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ 9 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 10 | // "lib": ["es2021"], /* Specify library files to be included in the compilation. 
*/ 11 | // "allowJs": true, /* Allow javascript files to be compiled. */ 12 | // "checkJs": true, /* Report errors in .js files. */ 13 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 14 | "declaration": true, /* Generates corresponding '.d.ts' file. */ 15 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 16 | // "sourceMap": true, /* Generates corresponding '.map' file. */ 17 | // "outFile": "./", /* Concatenate and emit output to single file. */ 18 | // "outDir": "../dist", /* Redirect output structure to the directory. */ 19 | // "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 20 | "composite": false, /* Enable project compilation */ 21 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 22 | // "removeComments": true, /* Do not emit comments to output. */ 23 | "noEmit": true, /* Do not emit outputs. */ 24 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 25 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 26 | "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 27 | 28 | /* Strict Type-Checking Options */ 29 | "strict": true, /* Enable all strict type-checking options. */ 30 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 31 | // "strictNullChecks": true, /* Enable strict null checks. */ 32 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 33 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 34 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. 
*/ 35 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 36 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 37 | 38 | /* Additional Checks */ 39 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 40 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 41 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 42 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 43 | 44 | /* Module Resolution Options */ 45 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 46 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 47 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 48 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 49 | // "typeRoots": [], /* List of folders to include type definitions from. */ 50 | // "types": [], /* Type declaration files to be included in compilation. */ 51 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 52 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 53 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 54 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 55 | 56 | /* Source Map Options */ 57 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. 
*/ 58 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 59 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 60 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 61 | 62 | /* Experimental Options */ 63 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 64 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 65 | "stripInternal": true /* strip internally marked values from .d.ts files */ 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "out": "docs", 3 | "exclude": "**/*.test.ts" 4 | } 5 | --------------------------------------------------------------------------------