├── .config └── dotnet-tools.json ├── .editorconfig ├── .fantomasignore ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ └── build.yml ├── .gitignore ├── .paket └── Paket.Restore.targets ├── FSharp.AWS.DynamoDB.sln ├── License.md ├── README.md ├── RELEASE_NOTES.md ├── build.fsx ├── global.json ├── paket.dependencies ├── paket.lock ├── src └── FSharp.AWS.DynamoDB │ ├── AssemblyInfo.fs │ ├── Expression │ ├── ConditionalExpr.fs │ ├── ExprCommon.fs │ ├── ExpressionContainers.fs │ ├── ProjectionExpr.fs │ └── UpdateExpr.fs │ ├── Extensions.fs │ ├── FSharp.AWS.DynamoDB.fsproj │ ├── Picklers │ ├── CollectionPicklers.fs │ ├── Pickler.fs │ ├── PicklerResolver.fs │ ├── PrimitivePicklers.fs │ ├── PropertyMetadata.fs │ ├── RecordPickler.fs │ └── UnionPickler.fs │ ├── RecordKeySchema.fs │ ├── RecordTemplate.fs │ ├── Script.fsx │ ├── TableContext.fs │ ├── Types.fs │ ├── Utils │ ├── DynamoUtils.fs │ └── Utils.fs │ └── paket.references └── tests └── FSharp.AWS.DynamoDB.Tests ├── ConditionalExpressionTests.fs ├── FSharp.AWS.DynamoDB.Tests.fsproj ├── MetricsCollectorTests.fs ├── MultipleKeyAttributeTests.fs ├── PaginationTests.fs ├── ProjectionExpressionTests.fs ├── RecordGenerationTests.fs ├── SimpleTableOperationTests.fs ├── SparseGSITests.fs ├── UpdateExpressionTests.fs ├── Utils.fs └── paket.references /.config/dotnet-tools.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "isRoot": true, 4 | "tools": { 5 | "paket": { 6 | "version": "9.0.2", 7 | "commands": [ 8 | "paket" 9 | ] 10 | }, 11 | "fantomas": { 12 | "version": "6.2.3", 13 | "commands": [ 14 | "fantomas" 15 | ] 16 | } 17 | } 18 | } -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # To learn more about .editorconfig see https://aka.ms/editorconfigdocs 2 | 3 | # All files 4 | [*] 5 | indent_style = space 6 | 7 | # Xml files 8 | [*.xml] 9 | indent_size = 2 10 | 11 | ############################### 12 | # F# Coding Conventions # 13 | ############################### 14 | # filetypes that need to be formatted by Fantomas: 15 | [*.{fs,fsx}] 16 | 17 | # files to be ignored for Fantomas may go into this file, if present: 18 | # .fantomasignore 19 | 20 | # indentation size, default=4 21 | indent_size=4 22 | 23 | # line length before it gets broken down into multiple lines 24 | # default 120 25 | max_line_length=140 26 | 27 | # Either crlf | lf, default is system-dependent (when not specified at all) 28 | # end_of_line=crlf 29 | 30 | # Whether end-of-file has a newline, default=true 31 | insert_final_newline=true 32 | 33 | # false: f(1,2) 34 | # true: f(1, 2) 35 | # default true 36 | fsharp_space_before_parameter=true 37 | 38 | # false: Option.map(fun x -> x) 39 | # true: Option.map (fun x -> x) 40 | # default true 41 | fsharp_space_before_lowercase_invocation=true 42 | 43 | # false: x.ToString() 44 | # true: x.ToString () 45 | # default false 46 | fsharp_space_before_uppercase_invocation=false 47 | 48 | # false: new Ship(withBeans) 49 | # true: new Ship (withBeans) 50 | # default false 51 | fsharp_space_before_class_constructor=false 52 | 53 | # false: __.member Foo(x) = x 54 | # true: __.member Foo (x) = x 55 | # default false 56 | fsharp_space_before_member=false 57 | 58 | # false: type Point = { x: int; y: int } 59 | # true: type Point = { x : int; y : int } 60 | # default false 61 | 
fsharp_space_before_colon=false 62 | 63 | # false: (a,b,c) 64 | # true: (a, b, c) 65 | # default true 66 | fsharp_space_after_comma=true 67 | 68 | # false: [a; b; 42] 69 | # true: [a ; b ; 42] 70 | # default false 71 | fsharp_space_before_semicolon=false 72 | 73 | # false: [a;b;42] 74 | # true: [a; b; 42] 75 | # default true 76 | fsharp_space_after_semicolon=true 77 | 78 | # false: let a = [1;2;3] 79 | # true: let a = [ 1;2;3 ] 80 | # default true 81 | fsharp_space_around_delimiter=true 82 | 83 | # breaks an if-then in smaller parts if it is on one line (recommend to keep default) 84 | # default 0 85 | fsharp_max_if_then_short_width=0 86 | 87 | # breaks an if-then-else in smaller parts if it is on one line 88 | # default 40 89 | fsharp_max_if_then_else_short_width=60 90 | 91 | # breaks an infix operator expression if it is on one line 92 | # infix: a + b + c 93 | # default 50 94 | fsharp_max_infix_operator_expression=100 95 | 96 | # breaks a single-line record declaration 97 | # i.e. if this gets too wide: { X = 10; Y = 12 } 98 | # requires fsharp_record_multiline_formatter=character_width to take effect 99 | # default 40 100 | fsharp_max_record_width=80 101 | 102 | # breaks a record into one item per line if items exceed this number 103 | # i.e. if set to 1, this will be on three lines: { X = 10; Y = 12 } 104 | # requires fsharp_record_multiline_formatter=number_of_items to take effect 105 | # default 1 106 | fsharp_max_record_number_of_items=1 107 | 108 | # whether to use line-length (by counting chars) or items (by counting fields) 109 | # for the record settings above 110 | # either number_of_items or character_width 111 | # default character_width 112 | fsharp_record_multiline_formatter=character_width 113 | 114 | # breaks a single line array or list if it exceeds this size 115 | # requires fsharp_array_or_list_multiline_formatter=character_width to take effect 116 | # default 40 117 | fsharp_max_array_or_list_width=100 118 | 119 | # breaks an array or list into one item per line if items exceeds this number 120 | # i.e. if set to 1, this will be shown on three lines [1; 2; 3] 121 | # requires fsharp_array_or_list_multiline_formatter=number_of_items to take effect 122 | # default 1 123 | fsharp_max_array_or_list_number_of_items=1 124 | 125 | # whether to use line-length (by counting chars) or items (by counting fields) 126 | # for the list and array settings above 127 | # either number_of_items or character_width 128 | # default character_width 129 | fsharp_array_or_list_multiline_formatter=character_width 130 | 131 | # maximum with of a value binding, does not include keyword "let" 132 | # default 80 133 | fsharp_max_value_binding_width=140 134 | 135 | # maximum width for function and member binding (rh-side) 136 | # default 40 137 | fsharp_max_function_binding_width=100 138 | 139 | # maximum width for expressions like X.DoY().GetZ(10).Help() 140 | # default 50 141 | fsharp_max_dot_get_expression_width=80 142 | 143 | # cramped: the default way in F# to format brackets. 144 | # aligned: alternative way of formatting records, arrays and lists. This will align the braces at the same column level. 145 | # stroustrup: allow for easier reordering of members and keeping the code succinct. 
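# as a rough illustration, a multiline record expression comes out approximately as:
#   cramped:    { X = 10
#                 Y = 12 }
#   stroustrup: {
#       X = 10
#       Y = 12
#   }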
146 | fsharp_multiline_bracket_style=cramped 147 | 148 | # insert a newline before a computation expression that spans multiple lines 149 | # default true 150 | fsharp_newline_before_multiline_computation_expression = false 151 | 152 | # whether a newline should be placed before members 153 | # false: type Range = 154 | # { From: float } 155 | # member this.Length = this.To - this.From 156 | # false: type Range = 157 | # { From: float } 158 | # 159 | # member this.Length = this.To - this.From 160 | # default false 161 | fsharp_newline_between_type_definition_and_members=true 162 | 163 | # if a function sign exceeds max_line_length, then: 164 | # false: do not place the equal-sign on a single line 165 | # true: place the equal-sign on a single line 166 | # default false 167 | fsharp_align_function_signature_to_indentation=false 168 | 169 | # see docs: https://fsprojects.github.io/fantomas/docs/end-users/Configuration.html#fsharp_alternative_long_member_definitions 170 | # default false 171 | fsharp_alternative_long_member_definitions=false 172 | 173 | # places closing paren in lambda on a newline in multiline lambdas 174 | # default false 175 | fsharp_multi_line_lambda_closing_newline=false 176 | 177 | # allows the 'else'-branch to be aligned at same level as 'else' if the ret type allows it 178 | # default false 179 | fsharp_experimental_keep_indent_in_branch=true 180 | 181 | # whether a bar is placed before DU 182 | # false: type MyDU = Short of int 183 | # true: type MyDU = | Short of int 184 | # default false 185 | fsharp_bar_before_discriminated_union_declaration=false 186 | 187 | # multiline, nested expressions must be surrounded by blank lines 188 | # default true 189 | fsharp_blank_lines_around_nested_multiline_expressions=false 190 | 191 | # max number of consecutive blank lines to keep 192 | # default 100 193 | fsharp_keep_max_number_of_blank_lines=3 -------------------------------------------------------------------------------- /.fantomasignore: -------------------------------------------------------------------------------- 1 | AssemblyInfo.fs 2 | paket-files/ -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Set default behavior to automatically normalize line endings. 3 | ############################################################################### 4 | * text=auto 5 | 6 | ############################################################################### 7 | # Set default behavior for command prompt diff. 8 | # 9 | # This is need for earlier builds of msysgit that does not have it on by 10 | # default for csharp files. 11 | # Note: This is only used by command line 12 | ############################################################################### 13 | #*.cs diff=csharp 14 | 15 | ############################################################################### 16 | # Set the merge driver for project and solution files 17 | # 18 | # Merging from the command prompt will add diff markers to the files if there 19 | # are conflicts (Merging from VS is not affected by the settings below, in VS 20 | # the diff markers are never inserted). Diff markers may cause the following 21 | # file extensions to fail to load in VS. An alternative would be to treat 22 | # these files as binary and thus will always conflict and require user 23 | # intervention with every merge. 
To do so, just uncomment the entries below 24 | ############################################################################### 25 | #*.sln merge=binary 26 | #*.csproj merge=binary 27 | #*.vbproj merge=binary 28 | #*.vcxproj merge=binary 29 | #*.vcproj merge=binary 30 | #*.dbproj merge=binary 31 | #*.fsproj merge=binary 32 | #*.lsproj merge=binary 33 | #*.wixproj merge=binary 34 | #*.modelproj merge=binary 35 | #*.sqlproj merge=binary 36 | #*.wwaproj merge=binary 37 | 38 | ############################################################################### 39 | # behavior for image files 40 | # 41 | # image files are treated as binary by default. 42 | ############################################################################### 43 | #*.jpg binary 44 | #*.png binary 45 | #*.gif binary 46 | 47 | ############################################################################### 48 | # diff behavior for common document formats 49 | # 50 | # Convert binary document formats to text before diffing them. This feature 51 | # is only available from the command line. Turn it on by uncommenting the 52 | # entries below. 53 | ############################################################################### 54 | #*.doc diff=astextplain 55 | #*.DOC diff=astextplain 56 | #*.docx diff=astextplain 57 | #*.DOCX diff=astextplain 58 | #*.dot diff=astextplain 59 | #*.DOT diff=astextplain 60 | #*.pdf diff=astextplain 61 | #*.PDF diff=astextplain 62 | #*.rtf diff=astextplain 63 | #*.RTF diff=astextplain 64 | 65 | # SourceLink support 66 | *.cs eol=lf 67 | *.fs eol=lf 68 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | ### Description 2 | 3 | > Short and descriptive example bug report title 4 | > A summary of the issue and the environment in which it occurs. If suitable, 5 | > include the steps required to reproduce the bug. 6 | 7 | ### Reproduction steps 8 | 9 | > 1. This is the first step 10 | > 2. This is the second step 11 | > 3. Further steps, etc. 12 | 13 | ### Reproduction code 14 | 15 | > `` - a link to the reduced test case (e.g. a GitHub Gist) 16 | 17 | ### Other info 18 | 19 | > Any other information you want to share that is relevant to the issue being 20 | > reported. This might include the lines of code that you have identified as 21 | > causing the bug, and potential solutions (and your opinions on their 22 | > merits). -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | ### Description 2 | 3 | > Description of the requested feature 4 | 5 | ### Potential alternatives 6 | 7 | > 1. This is the first step 8 | > 2. This is the second step 9 | > 3. Further steps, etc. 
10 | 11 | ### Other info 12 | 13 | > Any other information you want to share that is relevant to the feature request -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | strategy: 8 | matrix: 9 | os: [ubuntu-latest, windows-latest, macOS-latest] 10 | dotnet: ['9.0.x'] 11 | runs-on: ${{ matrix.os }} 12 | 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Setup .NET 16 | uses: actions/setup-dotnet@v3 17 | with: 18 | dotnet-version: ${{ matrix.dotnet }} 19 | - name: Restore tools 20 | run: dotnet tool restore 21 | - name: Restore dependencies 22 | run: dotnet restore 23 | - name: Run build 24 | run: dotnet fsi build.fsx -t Build 25 | 26 | test: 27 | strategy: 28 | matrix: 29 | os: [ubuntu-latest] 30 | dotnet: ['9.0.x'] 31 | runs-on: ${{ matrix.os }} 32 | 33 | services: 34 | dynamodb-local: 35 | image: amazon/dynamodb-local 36 | ports: 37 | - 8000:8000 38 | 39 | steps: 40 | - uses: actions/checkout@v4 41 | - name: Setup .NET 42 | uses: actions/setup-dotnet@v3 43 | with: 44 | dotnet-version: ${{ matrix.dotnet }} 45 | - name: Restore tools 46 | run: dotnet tool restore 47 | - name: Restore dependencies 48 | run: dotnet restore 49 | - name: Run tests 50 | run: dotnet fsi build.fsx -t Test -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 3 | 4 | # User-specific files 5 | *.suo 6 | *.user 7 | *.userosscache 8 | *.sln.docstates 9 | 10 | # User-specific files (MonoDevelop/Xamarin Studio) 11 | *.userprefs 12 | 13 | # Build results 14 | [Dd]ebug/ 15 | [Dd]ebugPublic/ 16 | [Rr]elease/ 17 | [Rr]eleases/ 18 | x64/ 19 | x86/ 20 | build/ 21 | bld/ 22 | [Bb]in/ 23 | [Oo]bj/ 24 | 25 | # Visual Studio 2015 cache/options directory 26 | .vs/ 27 | 28 | # MSTest test Results 29 | [Tt]est[Rr]esult*/ 30 | [Bb]uild[Ll]og.* 31 | 32 | # NUNIT 33 | *.VisualState.xml 34 | TestResult.xml 35 | 36 | # Build Results of an ATL Project 37 | [Dd]ebugPS/ 38 | [Rr]eleasePS/ 39 | dlldata.c 40 | 41 | # DNX 42 | project.lock.json 43 | artifacts/ 44 | 45 | *_i.c 46 | *_p.c 47 | *_i.h 48 | *.ilk 49 | *.meta 50 | *.obj 51 | *.pch 52 | *.pdb 53 | *.pgc 54 | *.pgd 55 | *.rsp 56 | *.sbr 57 | *.tlb 58 | *.tli 59 | *.tlh 60 | *.tmp 61 | *.tmp_proj 62 | *.log 63 | *.vspscc 64 | *.vssscc 65 | .builds 66 | *.pidb 67 | *.svclog 68 | *.scc 69 | 70 | # Chutzpah Test files 71 | _Chutzpah* 72 | 73 | # Visual C++ cache files 74 | ipch/ 75 | *.aps 76 | *.ncb 77 | *.opensdf 78 | *.sdf 79 | *.cachefile 80 | 81 | # Visual Studio profiler 82 | *.psess 83 | *.vsp 84 | *.vspx 85 | 86 | # TFS 2012 Local Workspace 87 | $tf/ 88 | 89 | # Guidance Automation Toolkit 90 | *.gpState 91 | 92 | # ReSharper is a .NET coding add-in 93 | _ReSharper*/ 94 | *.[Rr]e[Ss]harper 95 | *.DotSettings.user 96 | 97 | # JustCode is a .NET coding add-in 98 | .JustCode 99 | 100 | # TeamCity is a build add-in 101 | _TeamCity* 102 | 103 | # DotCover is a Code Coverage Tool 104 | *.dotCover 105 | 106 | # NCrunch 107 | _NCrunch_* 108 | .*crunch*.local.xml 109 | 110 | # MightyMoose 111 | *.mm.* 112 | AutoTest.Net/ 113 | 114 | # Web workbench (sass) 115 | .sass-cache/ 116 | 117 | # Installshield output folder 118 | [Ee]xpress/ 119 
| 120 | # DocProject is a documentation generator add-in 121 | DocProject/buildhelp/ 122 | DocProject/Help/*.HxT 123 | DocProject/Help/*.HxC 124 | DocProject/Help/*.hhc 125 | DocProject/Help/*.hhk 126 | DocProject/Help/*.hhp 127 | DocProject/Help/Html2 128 | DocProject/Help/html 129 | 130 | # Click-Once directory 131 | publish/ 132 | 133 | # Publish Web Output 134 | *.[Pp]ublish.xml 135 | *.azurePubxml 136 | ## TODO: Comment the next line if you want to checkin your 137 | ## web deploy settings but do note that will include unencrypted 138 | ## passwords 139 | #*.pubxml 140 | 141 | *.publishproj 142 | 143 | # NuGet Packages 144 | *.nupkg 145 | # The packages folder can be ignored because of Package Restore 146 | **/packages/* 147 | # Uncomment if necessary however generally it will be regenerated when needed 148 | #!**/packages/repositories.config 149 | 150 | # Windows Azure Build Output 151 | csx/ 152 | *.build.csdef 153 | 154 | # Windows Store app package directory 155 | AppPackages/ 156 | 157 | # Visual Studio cache files 158 | # files ending in .cache can be ignored 159 | *.[Cc]ache 160 | # but keep track of directories ending in .cache 161 | !*.[Cc]ache/ 162 | 163 | # Others 164 | ClientBin/ 165 | [Ss]tyle[Cc]op.* 166 | ~$* 167 | *~ 168 | *.dbmdl 169 | *.dbproj.schemaview 170 | *.pfx 171 | *.publishsettings 172 | node_modules/ 173 | orleans.codegen.cs 174 | 175 | # RIA/Silverlight projects 176 | Generated_Code/ 177 | 178 | # Backup & report files from converting an old project file 179 | # to a newer Visual Studio version. Backup files are not needed, 180 | # because we have git ;-) 181 | _UpgradeReport_Files/ 182 | Backup*/ 183 | UpgradeLog*.XML 184 | UpgradeLog*.htm 185 | 186 | # SQL Server files 187 | *.mdf 188 | *.ldf 189 | 190 | # Business Intelligence projects 191 | *.rdl.data 192 | *.bim.layout 193 | *.bim_*.settings 194 | 195 | # Microsoft Fakes 196 | FakesAssemblies/ 197 | 198 | # Node.js Tools for Visual Studio 199 | .ntvs_analysis.dat 200 | 201 | # Visual Studio 6 build log 202 | *.plg 203 | 204 | # Visual Studio 6 workspace options file 205 | *.opt 206 | 207 | # LightSwitch generated files 208 | GeneratedArtifacts/ 209 | _Pvt_Extensions/ 210 | ModelManifest.xml 211 | 212 | .fake 213 | .paket/paket.exe 214 | temp 215 | paket-files 216 | xunit.html 217 | /db 218 | .DS_Store 219 | .ionide/ 220 | .vscode/ 221 | .idea/ 222 | -------------------------------------------------------------------------------- /FSharp.AWS.DynamoDB.sln: -------------------------------------------------------------------------------- 1 | Microsoft Visual Studio Solution File, Format Version 12.00 2 | # Visual Studio 14 3 | VisualStudioVersion = 14.0.25420.1 4 | MinimumVisualStudioVersion = 10.0.40219.1 5 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".paket", ".paket", "{63297B98-5CED-492C-A5B7-A5B4F73CF142}" 6 | ProjectSection(SolutionItems) = preProject 7 | paket.dependencies = paket.dependencies 8 | paket.lock = paket.lock 9 | EndProjectSection 10 | EndProject 11 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "project", "project", "{BF60BC93-E09B-4E5F-9D85-95A519479D54}" 12 | ProjectSection(SolutionItems) = preProject 13 | build.fsx = build.fsx 14 | License.md = License.md 15 | README.md = README.md 16 | RELEASE_NOTES.md = RELEASE_NOTES.md 17 | global.json = global.json 18 | EndProjectSection 19 | EndProject 20 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{A157294B-F680-4AA4-910C-BC63840FF6E3}" 21 | EndProject 22 | 
Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "FSharp.AWS.DynamoDB", "src\FSharp.AWS.DynamoDB\FSharp.AWS.DynamoDB.fsproj", "{57485723-02FA-4BAF-939F-570B8B3024BA}" 23 | EndProject 24 | Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "FSharp.AWS.DynamoDB.Tests", "tests\FSharp.AWS.DynamoDB.Tests\FSharp.AWS.DynamoDB.Tests.fsproj", "{2C9367A6-2835-40D3-BEAE-00B66C7EF063}" 25 | EndProject 26 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{598B32DA-681A-4C0E-B6EC-97BE473B2A36}" 27 | EndProject 28 | Global 29 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 30 | Debug|Any CPU = Debug|Any CPU 31 | Release|Any CPU = Release|Any CPU 32 | EndGlobalSection 33 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 34 | {57485723-02FA-4BAF-939F-570B8B3024BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 35 | {57485723-02FA-4BAF-939F-570B8B3024BA}.Debug|Any CPU.Build.0 = Debug|Any CPU 36 | {57485723-02FA-4BAF-939F-570B8B3024BA}.Release|Any CPU.ActiveCfg = Release|Any CPU 37 | {57485723-02FA-4BAF-939F-570B8B3024BA}.Release|Any CPU.Build.0 = Release|Any CPU 38 | {2C9367A6-2835-40D3-BEAE-00B66C7EF063}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 39 | {2C9367A6-2835-40D3-BEAE-00B66C7EF063}.Debug|Any CPU.Build.0 = Debug|Any CPU 40 | {2C9367A6-2835-40D3-BEAE-00B66C7EF063}.Release|Any CPU.ActiveCfg = Release|Any CPU 41 | {2C9367A6-2835-40D3-BEAE-00B66C7EF063}.Release|Any CPU.Build.0 = Release|Any CPU 42 | EndGlobalSection 43 | GlobalSection(SolutionProperties) = preSolution 44 | HideSolutionNode = FALSE 45 | EndGlobalSection 46 | GlobalSection(NestedProjects) = preSolution 47 | {57485723-02FA-4BAF-939F-570B8B3024BA} = {A157294B-F680-4AA4-910C-BC63840FF6E3} 48 | {2C9367A6-2835-40D3-BEAE-00B66C7EF063} = {598B32DA-681A-4C0E-B6EC-97BE473B2A36} 49 | EndGlobalSection 50 | EndGlobal 51 | -------------------------------------------------------------------------------- /License.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Eirik George Tsarpalis 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FSharp.AWS.DynamoDB 2 | 3 | ![](https://github.com/fsprojects/FSharp.AWS.DynamoDB/workflows/Build/badge.svg) [![NuGet Badge](https://buildstats.info/nuget/FSharp.AWS.DynamoDB?includePreReleases=true)](https://www.nuget.org/packages/FSharp.AWS.DynamoDB) 4 | 5 | `FSharp.AWS.DynamoDB` is an F# wrapper over the standard `AWSSDK.DynamoDBv2` library that 6 | represents Table Items as F# records, enabling one to perform updates, queries and scans 7 | using F# quotation expressions. 8 | 9 | The API draws heavily on the corresponding [FSharp.Azure.Storage](https://github.com/fsprojects/FSharp.Azure.Storage) 10 | wrapper for Azure table storage. 11 | 12 | ## Introduction 13 | 14 | Table items can be represented using F# records: 15 | 16 | ```fsharp 17 | open FSharp.AWS.DynamoDB 18 | 19 | type WorkItemInfo = 20 | { 21 | [<HashKey>] 22 | ProcessId : int64 23 | [<RangeKey>] 24 | WorkItemId : int64 25 | 26 | Name : string 27 | UUID : Guid 28 | Dependencies : Set<string> 29 | Started : DateTimeOffset option 30 | } 31 | ``` 32 | 33 | We can now perform table operations on DynamoDB like so: 34 | 35 | ```fsharp 36 | open Amazon.DynamoDBv2 37 | open FSharp.AWS.DynamoDB.Scripting // Expose non-Async methods, e.g. PutItem/GetItem 38 | 39 | let client : IAmazonDynamoDB = ``your DynamoDB client instance`` 40 | let table = TableContext.Initialize<WorkItemInfo>(client, tableName = "workItems", Throughput.OnDemand) 41 | 42 | let workItem = { ProcessId = 0L; WorkItemId = 1L; Name = "Test"; UUID = Guid.NewGuid(); Dependencies = set [ "mscorlib" ]; Started = None } 43 | 44 | let key : TableKey = table.PutItem(workItem) 45 | let workItem' = table.GetItem(key) 46 | ``` 47 | 48 | Queries and scans can be performed using quoted predicates: 49 | 50 | ```fsharp 51 | let qResults = table.Query(keyCondition = <@ fun r -> r.ProcessId = 0L @>, 52 |                            filterCondition = <@ fun r -> r.Name = "test" @>) 53 | 54 | let sResults = table.Scan <@ fun r -> r.Started.Value >= DateTimeOffset.Now - TimeSpan.FromMinutes 1. @> 55 | ``` 56 | 57 | Values can be updated using quoted update expressions: 58 | 59 | ```fsharp 60 | let updated = table.UpdateItem(<@ fun r -> { r with Started = Some DateTimeOffset.Now } @>, 61 |                                preCondition = <@ fun r -> r.Started = None @>) 62 | ``` 63 | 64 | Or they can be updated using [the `SET`, `ADD`, `REMOVE` and `DELETE` operations of the `UpdateOp` DSL](./src/FSharp.AWS.DynamoDB/Types.fs#263), 65 | which is closer to the underlying DynamoDB API: 66 | 67 | ```fsharp 68 | let updated = table.UpdateItem <@ fun r -> SET r.Name "newName" &&& ADD r.Dependencies ["MBrace.Core.dll"] @> 69 | ``` 70 | 71 | Preconditions that are not upheld are signalled via an `Exception` by the underlying AWS SDK. These can be trapped using the supplied exception filter: 72 | 73 | ```fsharp 74 | try let! updated = table.UpdateItemAsync(<@ fun r -> { r with Started = Some DateTimeOffset.Now } @>, 75 |                                          preCondition = <@ fun r -> r.Started = None @>) 76 |     return Some updated 77 | with Precondition.CheckFailed -> 78 |     return None 79 | ``` 80 | 81 | ## Supported Field Types 82 | 83 | `FSharp.AWS.DynamoDB` supports the following field types: 84 | * Numerical types, enumerations and strings. 85 | * Array, Nullable, Guid, DateTimeOffset and TimeSpan. 86 | * F# lists. 87 | * F# sets with elements of type number, string or `byte[]`.
88 | * F# maps with key of type string. 89 | * F# records and unions (recursive types not supported, nested ones are). 90 | 91 | ## Supported operators in Query Expressions 92 | 93 | Query expressions support the following F# operators in their predicates: 94 | * `Array.length`, `List.length`, `Set.count` and `Map.Count`. 95 | * `String.StartsWith` and `String.Contains`. 96 | * `Set.contains` and `Map.containsKey`. **NOTE**: Only works for checking whether a single value is contained in a set in the table. 97 | e.g. Valid: ```table.Query(<@ fun r -> r.Dependencies |> Set.contains "mscorlib" @>)``` 98 | Invalid: ```table.Query(<@ fun r -> set ["Test";"Other"] |> Set.contains r.Name @>)``` 99 | * `Array.contains` and `List.contains`. 100 | * `Array.isEmpty` and `List.isEmpty`. 101 | * `Option.isSome`, `Option.isNone`, `Option.Value` and `Option.get`. 102 | * `fst` and `snd` for tuple records. 103 | 104 | ## Supported operators in Update Expressions 105 | 106 | Update expressions support the following F# value constructors: 107 | * `(+)` and `(-)` on numerical and set types. 108 | * `Array.append` and `List.append` (or `@`). 109 | * List consing (`::`). 110 | * `defaultArg` on optional fields. 111 | * `Set.add` and `Set.remove`. 112 | * `Map.add` and `Map.remove`. 113 | * `Option.Value` and `Option.get`. 114 | * `fst` and `snd` for tuple records. 115 | 116 | ## Example: Representing an atomic counter as an Item in a DynamoDB Table 117 | 118 | ```fsharp 119 | type private CounterEntry = { [<HashKey>] Id : Guid ; Value : int64 } 120 | 121 | type Counter private (table : TableContext<CounterEntry>, key : TableKey) = 122 | 123 |     member _.Value = async { 124 |         let! current = table.GetItemAsync(key) 125 |         return current.Value 126 |     } 127 | 128 |     member _.Incr() = async { 129 |         let! updated = table.UpdateItemAsync(key, <@ fun e -> { e with Value = e.Value + 1L } @>) 130 |         return updated.Value 131 |     } 132 | 133 |     static member Create(client : IAmazonDynamoDB, tableName : string) = async { 134 |         let table = TableContext<CounterEntry>(client, tableName) 135 |         let throughput = ProvisionedThroughput(readCapacityUnits = 10L, writeCapacityUnits = 10L) 136 |         let! _desc = table.VerifyOrCreateTableAsync(Throughput.Provisioned throughput) 137 |         let initialEntry = { Id = Guid.NewGuid() ; Value = 0L } 138 |         let! key = table.PutItemAsync(initialEntry) 139 |         return Counter(table, key) 140 |     } 141 | ``` 142 | 143 | _NOTE: It's advised to split one-time initialization/verification of table creation from the application logic; see [`Script.fsx`](src/FSharp.AWS.DynamoDB/Script.fsx#99) for further details_. 144 | 145 | ## Projection Expressions 146 | 147 | Projection expressions can be used to fetch a subset of table attributes, which can be useful when performing large queries: 148 | 149 | ```fsharp 150 | table.QueryProjected(<@ fun r -> r.HashKey = "Foo" @>, <@ fun r -> r.HashKey, r.Values.Nested.[0] @>) 151 | ``` 152 | 153 | The resulting value is a tuple of the specified attributes. Tuples can be of any arity but must contain non-conflicting document paths. 154 | 155 | ## Secondary Indices 156 | 157 | [Global Secondary Indices](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html) can be defined using the `GlobalSecondaryHashKey` and `GlobalSecondaryRangeKey` attributes: 158 | ```fsharp 159 | type Record = 160 |     { 161 |         [<HashKey>] HashKey : string 162 |         ...
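        // the following two attributes declare a single GSI (assumed here to be named "GSI"); its hash- and range-key fields must reference the same index name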
163 |         [<GlobalSecondaryHashKey(indexName = "GSI")>] GSIH : string 164 |         [<GlobalSecondaryRangeKey(indexName = "GSI")>] GSIR : string 165 |     } 166 | ``` 167 | 168 | Queries can now be performed on the `GSIH` and `GSIR` fields as if they were regular `HashKey` and `RangeKey` attributes. 169 | 170 | _NOTE: Global secondary indices are created using the same provisioned throughput as for the primary keys_. 171 | 172 | [Local Secondary Indices](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LSI.html) can be defined using the `LocalSecondaryIndex` attribute: 173 | ```fsharp 174 | type Record = 175 |     { 176 |         [<HashKey>] HashKey : string 177 |         [<RangeKey>] RangeKey : Guid 178 |         ... 179 |         [<LocalSecondaryIndex>] LSI : double 180 |     } 181 | ``` 182 | 183 | Queries can now be performed using `LSI` as a secondary `RangeKey`. 184 | 185 | NB: Due to API restrictions, the secondary index support in `FSharp.AWS.DynamoDB` always [projects](https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Projection.html) `ALL` table attributes. 186 | _NOTE: A key impact of this is that it induces larger write and storage costs (each write hits two copies of everything), although it does minimize read latency by avoiding extra 'fetch' operations - see [the LSI documentation](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LSI.html) for details._ 187 | 188 | ### Pagination 189 | 190 | Pagination is supported on both scans & queries: 191 | ```fsharp 192 | let firstPage = table.ScanPaginated(limit = 100) 193 | printfn "First 100 results = %A" firstPage.Records 194 | match firstPage.LastEvaluatedKey with 195 | | Some key -> 196 |     let nextPage = table.ScanPaginated(limit = 100, exclusiveStartKey = key) 197 | ``` 198 | Note that the `exclusiveStartKey` on paginated queries must include both the table key fields and the index fields (if querying an LSI or GSI). 199 | This is accomplished via the `IndexKey` type - if constructing manually (e.g. deserialising a start key from an API call): 200 | ```fsharp 201 | let startKey = IndexKey.Combined(gsiHashValue, gsiRangeValue, TableKey.Hash(primaryKey)) 202 | let page = table.QueryPaginated(<@ fun t -> t.GsiHash = gsiHashValue @>, limit = 100, exclusiveStartKey = startKey) 203 | ``` 204 | 205 | ## Notes on value representation 206 | 207 | Due to restrictions of DynamoDB, it may sometimes be the case that objects are not persisted faithfully.
208 | For example, consider the following record definition: 209 | 210 | ```fsharp 211 | type Record = 212 |     { 213 |         [<HashKey>] 214 |         HashKey : Guid 215 | 216 |         Optional : int option option 217 |         Lists : int list list 218 |     } 219 | 220 | let item = { HashKey = Guid.NewGuid() ; Optional = Some None ; Lists = [[1;2];[];[3;4]] } 221 | let key = table.PutItem item 222 | ``` 223 | 224 | Subsequently recovering the given key will result in the following value: 225 | 226 | ```fsharp 227 | > table.GetItem key 228 | val it : Record = {HashKey = 8d4f0678-6def-4bc9-a0ff-577a53c1337c; 229 |                    Optional = None; 230 |                    Lists = [[1;2]; [3;4]];} 231 | ``` 232 | 233 | ## Precomputing DynamoDB Expressions 234 | 235 | It is possible to precompute a DynamoDB expression as follows: 236 | 237 | ```fsharp 238 | let precomputedConditional = table.Template.PrecomputeConditionalExpr <@ fun w -> w.Name <> "test" && w.Dependencies.Contains "mscorlib" @> 239 | ``` 240 | 241 | This precomputed conditional can now be used in place of the original expression in the `FSharp.AWS.DynamoDB` API: 242 | 243 | ```fsharp 244 | let results = table.Scan precomputedConditional 245 | ``` 246 | 247 | `FSharp.AWS.DynamoDB` also supports precomputation of parametric expressions: 248 | 249 | ```fsharp 250 | let startedBefore = table.Template.PrecomputeConditionalExpr <@ fun time w -> w.Started.Value <= time @> 251 | table.Scan(startedBefore (DateTimeOffset.Now - TimeSpan.FromDays 1.)) 252 | ``` 253 | 254 | (See [`Script.fsx`](src/FSharp.AWS.DynamoDB/Script.fsx) for example timings showing the relative efficiency.) 255 | 256 | ## `Transaction` 257 | 258 | `FSharp.AWS.DynamoDB` supports DynamoDB transactions via the `Transaction` class. 259 | 260 | The supported individual operations are: 261 | - `Check`: `ConditionCheck` - potentially veto the batch if the ([precompiled](#Precomputing-DynamoDB-Expressions)) `condition` is not fulfilled by the item identified by `key` 262 | - `Put`: `PutItem`-equivalent operation that upserts a supplied `item` (with an `option`al `precondition`) 263 | - `Update`: `UpdateItem`-equivalent operation that applies a specified `updater` expression to an item with a specified `key` (with an `option`al `precondition`) 264 | - `Delete`: `DeleteItem`-equivalent operation that deletes the item with a specified `key` (with an `option`al `precondition`) 265 | 266 | ```fsharp 267 | let compile = table.Template.PrecomputeConditionalExpr 268 | let doesntExistCondition = compile <@ fun t -> NOT_EXISTS t.Value @> 269 | let existsCondition = compile <@ fun t -> EXISTS t.Value @> 270 | let key = TableKey.Combined(hashKey, rangeKey) 271 | 272 | let transaction = table.CreateTransaction() 273 | 274 | transaction.Check(table, key, doesntExistCondition) 275 | transaction.Put(table, item2, None) 276 | transaction.Put(table, item3, Some existsCondition) 277 | transaction.Delete(table, table.Template.ExtractKey item5, None) 278 | 279 | do! transaction.TransactWriteItems() 280 | ``` 281 | 282 | Failed preconditions (or `Check`s) are signalled as per the underlying API: via a `TransactionCanceledException`. 283 | Use `Transaction.TransactionCanceledConditionalCheckFailed` to trap such conditions: 284 | 285 | ```fsharp 286 | try do! transaction.TransactWriteItems() 287 |     return Some result 288 | with Transaction.TransactionCanceledConditionalCheckFailed -> return None 289 | ``` 290 | 291 | See [`TransactWriteItems tests`](./tests/FSharp.AWS.DynamoDB.Tests/SimpleTableOperationTests.fs#156) for more details and examples.
292 | 293 | It generally costs [double or more the Write Capacity Units charges compared to using precondition expressions](https://zaccharles.medium.com/calculating-a-dynamodb-items-size-and-consumed-capacity-d1728942eb7c) 294 | on individual operations. 295 | 296 | ## Observability 297 | 298 | Critical to any production deployment is to ensure that you have good insight into the costs your application is incurring at runtime. 299 | 300 | A hook is provided so metrics can be published via your preferred Observability provider. For example, using [Prometheus.NET](https://github.com/prometheus-net/prometheus-net): 301 | 302 | ```fsharp 303 | let dbCounter = Prometheus.Metrics.CreateCounter("aws_dynamodb_requests_total", "Count of all DynamoDB requests", "table", "operation") 304 | let processMetrics (m : RequestMetrics) = 305 | dbCounter.WithLabels(m.TableName, string m.Operation).Inc() 306 | let table = TableContext(client, tableName = "workItems", metricsCollector = processMetrics) 307 | ``` 308 | 309 | If `metricsCollector` is supplied, the requests will set `ReturnConsumedCapacity` to `ReturnConsumedCapacity.INDEX` 310 | and the `RequestMetrics` parameter will contain a list of `ConsumedCapacity` objects returned from the DynamoDB operations. 311 | 312 | ## Read consistency 313 | 314 | DynamoDB follows an [eventually consistent model](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ReadConsistency.html) by default. 315 | As a consequence, data returned from a read operation might not reflect the changes of the most recently performed write operation if they are made in quick succession. 316 | To circumvent this limitation and enforce strongly consistent reads, DynamoDB provides a `ConsistentRead` parameter for read operations. 317 | You can enable this by supplying the `consistentRead` parameter on the respective `TableContext` methods, e.g. for `GetItem`: 318 | 319 | ```fsharp 320 | async { 321 | let! key : TableKey = table.PutItemAsync(workItem) 322 | let! workItem = table.GetItemAsync(key, consistentRead = true) 323 | } 324 | ``` 325 | 326 | **Note:** strongly consistent reads are more likely to fail, have higher latency, and use more read capacity than eventually consistent reads. 327 | 328 | ## Building & Running Tests 329 | 330 | To build using the dotnet SDK: 331 | 332 | `dotnet tool restore` 333 | `dotnet build` 334 | 335 | Tests are run using dynamodb-local on port 8000. 
Using the docker image is recommended: 336 | 337 | `docker run -p 8000:8000 amazon/dynamodb-local` 338 | 339 | then 340 | 341 | `dotnet test -c Release` 342 | 343 | ## Maintainer(s) 344 | 345 | - [@samritchie](https://github.com/samritchie) 346 | 347 | The default maintainer account for projects under "fsprojects" is [@fsprojectsgit](https://github.com/fsprojectsgit) - F# Community Project Incubation Space (repo management) 348 | -------------------------------------------------------------------------------- /RELEASE_NOTES.md: -------------------------------------------------------------------------------- 1 | ### 0.12.3-beta 2 | * Removed erroneous `Dotnet.Reproduciblebuilds` dependency [#75](https://github.com/fsprojects/FSharp.AWS.DynamoDB/pull/75) 3 | ### 0.12.2-beta 4 | * (breaking) Revised multi-table transaction API (thanks @bartelink) 5 | 6 | ### 0.12.1-beta 7 | * Added support for `defaultArg` in update expressions on the same attribute, allowing SET if_not_exists semantics (eg { record with OptionalValue = Some (defaultArg record.OptionalValue "Default") }) 8 | * Allow empty strings in non-key attributes (thanks @purkhusid) 9 | * Support multi-table transactions (thanks @purkhusid) 10 | 11 | ### 0.12.0-beta 12 | * Added support for `Array.contains` and `List.contains` to compare an attribute against multiple values (thanks @faldor20) 13 | * Added `AllowMultiple = true` for `GlobalSecondaryIndex*` Attributes to allow indices to share an attribute 14 | * Modified index selection priority to better handle string `BeginsWith` queries on inverse GSIs (thanks @matti-avilabs) 15 | * Added `ReturnValuesOnConditionCheckFailure.ALL_OLD` to include the item values in the `ConditionCheckFailedException` (thanks for the suggestion @bartelink) 16 | * (breaking) Removed [obsolete](https://learn.microsoft.com/en-us/dotnet/core/compatibility/serialization/5.0/binaryformatter-serialization-obsolete) `BinaryFormatterAttribute` 17 | 18 | ### 0.11.2-beta 19 | * Added optional `?consistentRead` parameter to Get requests (thanks @matti-avilabs) 20 | * Fixed `TransactWriteItems`: updated validation to reflect [increased limit of 100 items in service](https://aws.amazon.com/about-aws/whats-new/2022/09/amazon-dynamodb-supports-100-actions-per-transaction/) 21 | 22 | ### 0.11.1-beta 23 | * Updated internal `TypeShape` dependency to 10.0.0 24 | * Updated internal `AwaitTaskCorrect` implementation to align with [canonical version](http://www.fssnip.net/7Rc/title/AsyncAwaitTaskCorrect) [#49](https://github.com/fsprojects/FSharp.AWS.DynamoDB/pull/49) 25 | * Added SourceLink info (using `DotNet.ReproducibleBuilds`) 26 | * Fixed `TableContext.UpdateTableIfRequiredAsync`: Guard against `NullReferenceException` when `StreamSpecification` is `null` 27 | * (breaking) Changed `TableContext.UpdateTableIfRequiredAsync`/`VerifyOrCreateTableAsync` to yield `TableDescription` (in order to surface ARNs) 28 | 29 | ### 0.11.0-beta 30 | * Added `Precondition.CheckFailed` 31 | * Added `TableContext.TransactWriteItems`, `TransactWrite` DU, `TransactWriteItemsRequest.TransactionCanceledConditionalCheckFailed` 32 | 33 | ### 0.10.1-beta 34 | * Fixed accidentally removed/renamed legacy factory methods (`TableContext.Create`/`TableContext.CreateAsync`) 35 | 36 | ### 0.10.0-beta 37 | * Added `TableContext` constructor (replaces `TableContext.Create(verifyTable = false)`) 38 | * Added `TableContext.VerifyOrCreateTableAsync` (replaces `TableContext.VerifyTableAsync(createIfNotExists = true)`) 39 | * Added 
`TableContext.UpdateTableIfRequiredAsync` (conditional `UpdateTableAsync` to establish specified `throughput` or `streaming` only if required. Replaces `UpdateProvisionedThroughputAsync`) 40 | * Added `TableContext.Scripting.Initialize` (two overloads, replacing `TableContext.Create()` and `TableContext.Create(createIfNotExists = true)`) 41 | * Added `Throughput.OnDemand` mode (sets `BillingMode` to `PAY_PER_REQUEST`, to go with the existing support for configuring `PROVISIONED` and a `ProvisionedThroughput`) 42 | * Added ability to configure DynamoDB streaming (via a `Streaming` DU) to `VerifyOrCreateTableAsync` and `UpdateTableIfRequiredAsync` 43 | * Obsoleted `TableContext.Create` (replace with `TableContext.Scripting.Initialize`, `TableContext.VerifyOrCreateTableAsync`, `TableContext.VerifyTableAsync`) 44 | * Obsoleted `TableContext.UpdateProvisionedThroughputAsync` (replace with `TableContext.UpdateTableIfRequiredAsync`) 45 | * (breaking) Obsoleted `TableContext.VerifyTableAsync` optional argument to create a Table (replace with `VerifyOrCreateTableAsync`) 46 | * (breaking) Changed `TableKeySchemata.CreateCreateTableRequest` to `ApplyToCreateTableRequest` (with minor signature change) 47 | * (breaking; reverted in `0.10.1`) Removed `TableContext.CreateAsync` (replace with `TableContext.VerifyTableAsync` or `VerifyOrCreateTableAsync`) 48 | 49 | ### 0.9.4-beta 50 | * Moved Sync-over-Async versions of `TableContext` operations into `namespace FSharp.AWS.DynamoDB.Scripting` 51 | * Added `WithMetricsCollector()` copy method to allow separating metrics by context (eg by request) 52 | * Ensured metrics are reported even for failed requests 53 | * Added `TryGetItemAsync` (same as `GetItemAsync`, but returns `None`, instead of throwing, if an item is not present) 54 | * Switched test framework to Xunit, assertions to Unquote, runner to `dotnet test` 55 | 56 | ### 0.9.3-beta 57 | * Added `RequestMetrics` record type 58 | * Added an optional `metricsCollector` parameter to `TableContext.Create` to receive operation metrics 59 | 60 | ### 0.9.2-beta 61 | * Pinned FSharp.Core to 4.7.2, properly this time I hope 62 | 63 | ### 0.9.1-beta 64 | * Pinned FSharp.Core to 4.7.2 65 | 66 | ### 0.9.0-beta 67 | * Added `ScanPaginated*` and `QueryPaginated*` methods to `TableContext` to support paginating queries (implements #27) 68 | * Added `IndexKey` type to support additional key fields in LastEvaluatedKey for queries (ie on LSI & GSI indices) 69 | * **Breaking** renamed one of the method parameters from `filterExpr` to `filterCondition` for consistency 70 | 71 | ### 0.8.2-beta 72 | * Replace attribute name validation with something that sticks closer to the [AWS naming rules](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html) - fixes #29 73 | * Update project to net50 & bumped dependencies 74 | 75 | ### 0.8.1-beta 76 | * Replace deprecated AWS ProfileManager usage 77 | * Bumped test project netcoreapp version to 3.1 78 | * Fixed 'Invalid UpdateExpression' exception for precomputed Map.remove operations (#20) 79 | 80 | ### 0.8.0-beta 81 | * Move to netstandard2.0. 82 | 83 | ### 0.7.0-beta 84 | * Add sparse GSI Support. 85 | 86 | ### 0.6.0-beta 87 | * Preserve original offsets when persisting DateTimeOffset fields. 88 | 89 | ### 0.5.0-beta 90 | * Move converter generation to TypeShape. 91 | * Target latest unquote release. 92 | 93 | ### 0.4.1-beta 94 | * Fix packaging issue. 95 | 96 | ### 0.4.0-beta 97 | * Implement credential helper methods. 
98 | 99 | ### 0.3.1-beta 100 | * Minor bugfixes. 101 | 102 | ### 0.3.0-beta 103 | * Implement secondary indices. 104 | 105 | ### 0.2.1-beta 106 | * Projection expressions bugfixes and improvements. 107 | 108 | ### 0.2.0-beta 109 | * Implement projection expressions. 110 | * Minor API improvements. 111 | * Minor bugfixes. 112 | 113 | ### 0.1.1-beta 114 | * Expose ProvisionedThroughput type to local namespace. 115 | 116 | ### 0.1.0-beta 117 | * Rename to FSharp.AWS.DynamoDB. 118 | * Add update provision throughput methods. 119 | 120 | ### 0.0.25-alpha 121 | * Improvements to Enumeration representations. 122 | * Add checks for comparison compatibility in condition expressions. 123 | 124 | ### 0.0.24-alpha 125 | * Bugfix. 126 | 127 | ### 0.0.23-alpha 128 | * Add parametric support in attribute ids and key lookups. 129 | 130 | ### 0.0.22-alpha 131 | * Add support for Array/List.isEmpty and Option.isSome/isNone in conditional expressions. 132 | 133 | ### 0.0.21-alpha 134 | * Bugfix. 135 | 136 | ### 0.0.20-alpha 137 | * Fix floating point parsing issue. 138 | 139 | ### 0.0.19-alpha 140 | * Bugfixes. 141 | 142 | ### 0.0.18-alpha 143 | * Fix API issue. 144 | 145 | ### 0.0.17-alpha 146 | * Improve exception message in case where table item is not found. 147 | 148 | ### 0.0.16-alpha 149 | * Tweak ConstanHashKey methods in RecordTemplate. 150 | 151 | ### 0.0.15-alpha 152 | * Implement GetHashKeyCondition method. 153 | 154 | ### 0.0.14-alpha 155 | * Implement update expression combiners. 156 | 157 | ### 0.0.13-alpha 158 | * Implement update expression combiners. 159 | 160 | ### 0.0.12-alpha 161 | * Make scan filter condition optional. 162 | 163 | ### 0.0.11-alpha 164 | * Implement conditional combinators. 165 | 166 | ### 0.0.10-alpha 167 | * Add support for condition expressions in delete operations. 168 | 169 | ### 0.0.9-alpha 170 | * Add support for attribute existential primitives. 171 | 172 | ### 0.0.8-alpha 173 | * Implement string representation attribute. 174 | 175 | ### 0.0.7-alpha 176 | * Support list consing in update expressions. 177 | 178 | ### 0.0.6-alpha 179 | * TableContext API refinements. 180 | 181 | ### 0.0.5-alpha 182 | * Implement parametric expressions. 183 | 184 | ### 0.0.4-alpha 185 | * Bugfix. 186 | 187 | ### 0.0.3-alpha 188 | * Revisions in TableContext API. 189 | * Support MemoryStream field types. 190 | * Implement DefaultRangeKeyAttribute. 191 | 192 | ### 0.0.2-alpha 193 | * Improve PropertySerializer API. 194 | 195 | ### 0.0.1-alpha 196 | * Initial release. 
197 | -------------------------------------------------------------------------------- /build.fsx: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S dotnet fsi 2 | #r "nuget: Fake.Core.Target" 3 | 4 | // Boilerplate 5 | System.Environment.GetCommandLineArgs() 6 | |> Array.skip 2 // skip fsi.exe; build.fsx 7 | |> Array.toList 8 | |> Fake.Core.Context.FakeExecutionContext.Create false __SOURCE_FILE__ 9 | |> Fake.Core.Context.RuntimeContext.Fake 10 | |> Fake.Core.Context.setExecutionContext 11 | 12 | // -------------------------------------------------------------------------------------- 13 | // FAKE build script 14 | // -------------------------------------------------------------------------------------- 15 | #r "nuget: Fake.Core.Target" 16 | #r "nuget: Fake.Core.Process" 17 | #r "nuget: Fake.DotNet.Cli" 18 | #r "nuget: Fake.Core.ReleaseNotes" 19 | #r "nuget: Fake.DotNet.AssemblyInfoFile" 20 | #r "nuget: Fake.DotNet.Paket" 21 | #r "nuget: Fake.Tools.Git" 22 | #r "nuget: Fake.Core.Environment" 23 | #r "nuget: Fake.Core.UserInput" 24 | #r "nuget: Fake.IO.FileSystem" 25 | #r "nuget: Fake.Api.GitHub" 26 | #r "nuget: Octokit" 27 | #r "nuget: MSBuild.StructuredLogger, Version=2.2.243" 28 | 29 | open Fake.Core 30 | open Fake.DotNet 31 | open Fake.Tools 32 | open Fake.IO 33 | open Fake.IO.FileSystemOperators 34 | open Fake.IO.Globbing.Operators 35 | open Fake.Core.TargetOperators 36 | open Fake.Api 37 | 38 | // -------------------------------------------------------------------------------------- 39 | // Information about the project to be used at NuGet and in AssemblyInfo files 40 | // -------------------------------------------------------------------------------------- 41 | 42 | let project = "FSharp.AWS.DynamoDB" 43 | 44 | let summary = "An F# wrapper over the standard Amazon.DynamoDB library" 45 | 46 | let gitOwner = "fsprojects" 47 | let gitName = "FSharp.AWS.DynamoDB" 48 | let gitHome = "https://github.com/" + gitOwner 49 | 50 | // -------------------------------------------------------------------------------------- 51 | // Build variables 52 | // -------------------------------------------------------------------------------------- 53 | 54 | let buildDir = "./build/" 55 | let nugetDir = "./out/" 56 | 57 | 58 | System.Environment.CurrentDirectory <- __SOURCE_DIRECTORY__ 59 | let release = ReleaseNotes.parse (System.IO.File.ReadAllLines "RELEASE_NOTES.md") 60 | 61 | // -------------------------------------------------------------------------------------- 62 | // Helpers 63 | // -------------------------------------------------------------------------------------- 64 | let isNullOrWhiteSpace = System.String.IsNullOrWhiteSpace 65 | 66 | let exec cmd args dir = 67 | let proc = 68 | CreateProcess.fromRawCommandLine cmd args 69 | |> CreateProcess.ensureExitCodeWithMessage (sprintf "Error while running '%s' with args: %s" cmd args) 70 | (if isNullOrWhiteSpace dir then 71 | proc 72 | else 73 | proc |> CreateProcess.withWorkingDirectory dir) 74 | |> Proc.run 75 | |> ignore 76 | 77 | let getBuildParam = Environment.environVar 78 | let DoNothing = ignore 79 | // -------------------------------------------------------------------------------------- 80 | // Build Targets 81 | // -------------------------------------------------------------------------------------- 82 | 83 | Target.create "Clean" (fun _ -> Shell.cleanDirs [ buildDir; nugetDir ]) 84 | 85 | Target.create "AssemblyInfo" (fun _ -> 86 | let getAssemblyInfoAttributes projectName = 87 | [ 
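          // shared assembly-level metadata written into each project's generated AssemblyInfo file; InternalsVisibleTo exposes internals to the corresponding *.Tests assembly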
AssemblyInfo.Title projectName 88 | AssemblyInfo.Product project 89 | AssemblyInfo.Description summary 90 | AssemblyInfo.Version release.AssemblyVersion 91 | AssemblyInfo.FileVersion release.AssemblyVersion 92 | AssemblyInfo.InternalsVisibleTo(projectName + ".Tests") ] 93 | 94 | let getProjectDetails (projectPath: string) = 95 | let projectName = System.IO.Path.GetFileNameWithoutExtension(projectPath) 96 | (projectPath, projectName, System.IO.Path.GetDirectoryName(projectPath), (getAssemblyInfoAttributes projectName)) 97 | 98 | !! "src/**/*.??proj" 99 | |> Seq.map getProjectDetails 100 | |> Seq.iter (fun (projFileName, _, folderName, attributes) -> 101 | match projFileName with 102 | | proj when proj.EndsWith("fsproj") -> AssemblyInfoFile.createFSharp (folderName "AssemblyInfo.fs") attributes 103 | | proj when proj.EndsWith("csproj") -> 104 | AssemblyInfoFile.createCSharp ((folderName "Properties") "AssemblyInfo.cs") attributes 105 | | proj when proj.EndsWith("vbproj") -> 106 | AssemblyInfoFile.createVisualBasic ((folderName "My Project") "AssemblyInfo.vb") attributes 107 | | _ -> ())) 108 | 109 | 110 | Target.create "Restore" (fun _ -> DotNet.restore id "" 111 | //exec "dotnet" "restore" "." 112 | ) 113 | 114 | Target.create "Build" (fun _ -> 115 | DotNet.build id "" 116 | exec "dotnet" "restore" ".") 117 | 118 | Target.create "Test" (fun _ -> exec "dotnet" @"test --configuration Release tests/FSharp.AWS.DynamoDB.Tests" ".") 119 | 120 | // TODO: FSharp.Formatting docs 121 | Target.create "Docs" DoNothing 122 | 123 | // -------------------------------------------------------------------------------------- 124 | // Release Targets 125 | // -------------------------------------------------------------------------------------- 126 | Target.create "BuildRelease" (fun _ -> 127 | DotNet.build 128 | (fun p -> 129 | { p with 130 | Configuration = DotNet.BuildConfiguration.Release 131 | OutputPath = Some buildDir 132 | MSBuildParams = 133 | { p.MSBuildParams with 134 | Properties = [ ("Version", release.NugetVersion); ("PackageReleaseNotes", String.concat "\n" release.Notes) ] } }) 135 | "FSharp.AWS.DynamoDB.sln") 136 | 137 | 138 | Target.create "Pack" (fun _ -> 139 | DotNet.pack 140 | (fun p -> 141 | { p with 142 | Configuration = DotNet.BuildConfiguration.Release 143 | OutputPath = Some nugetDir 144 | MSBuildParams = 145 | { p.MSBuildParams with 146 | Properties = 147 | [ ("Version", release.NugetVersion) 148 | ("PackageReleaseNotes", String.concat "\n" release.Notes) 149 | // ("IncludeSymbols", "true") 150 | // ("SymbolPackageFormat", "snupkg") // https://github.com/fsprojects/Paket/issues/3685 151 | ] } }) 152 | "FSharp.AWS.DynamoDB.sln") 153 | 154 | Target.create "ReleaseGitHub" (fun _ -> 155 | let remote = 156 | Git.CommandHelper.getGitResult "" "remote -v" 157 | |> Seq.filter (fun (s: string) -> s.EndsWith("(push)")) 158 | |> Seq.tryFind (fun (s: string) -> s.Contains(gitOwner + "/" + gitName)) 159 | |> function 160 | | None -> gitHome + "/" + gitName 161 | | Some(s: string) -> s.Split().[0] 162 | 163 | Git.Staging.stageAll "" 164 | Git.Commit.exec "" (sprintf "Bump version to %s" release.NugetVersion) 165 | Git.Branches.pushBranch "" remote (Git.Information.getBranchName "") 166 | 167 | Git.Branches.tag "" release.NugetVersion 168 | Git.Branches.pushTag "" remote release.NugetVersion 169 | 170 | let client = 171 | let token = 172 | match getBuildParam "GITHUB_TOKEN" with 173 | | s when not (isNullOrWhiteSpace s) -> s 174 | | _ -> failwith "please set the GITHUB_TOKEN environment variable to 
a github personal access token with repo access." 175 | 176 | GitHub.createClientWithToken token 177 | let files = !!(nugetDir "*.nupkg") 178 | 179 | // release on github 180 | let cl = 181 | client 182 | |> GitHub.draftNewRelease gitOwner gitName release.NugetVersion (release.SemVer.PreRelease <> None) release.Notes 183 | (cl, files) 184 | ||> Seq.fold (fun acc e -> acc |> GitHub.uploadFile e) 185 | |> GitHub.publishDraft 186 | |> Async.RunSynchronously) 187 | 188 | Target.create "Push" (fun _ -> 189 | let key = 190 | match getBuildParam "NUGET_KEY" with 191 | | s when not (isNullOrWhiteSpace s) -> s 192 | | _ -> UserInput.getUserPassword "NuGet Key: " 193 | let pushParams = 194 | { NuGet.NuGet.NuGetPushParams.Create() with 195 | ApiKey = Some key 196 | Source = Some "https://api.nuget.org/v3/index.json" } 197 | DotNet.nugetPush (fun o -> o.WithPushParams pushParams) (sprintf "%s**.*.nupkg" nugetDir)) 198 | 199 | // -------------------------------------------------------------------------------------- 200 | // Build order 201 | // -------------------------------------------------------------------------------------- 202 | Target.create "Default" DoNothing 203 | Target.create "Release" DoNothing 204 | 205 | "Clean" ==> "AssemblyInfo" ==> "Restore" ==> "Build" ==> "Test" ==> "Default" 206 | 207 | "Clean" ==> "AssemblyInfo" ==> "Restore" ==> "BuildRelease" ==> "Docs" 208 | 209 | "Default" ==> "Pack" ==> "ReleaseGitHub" ==> "Push" ==> "Release" 210 | 211 | Target.runOrDefaultWithArguments "Default" 212 | -------------------------------------------------------------------------------- /global.json: -------------------------------------------------------------------------------- 1 | { 2 | "sdk": { 3 | "version": "9.0.101", 4 | "rollForward": "latestMajor" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /paket.dependencies: -------------------------------------------------------------------------------- 1 | source https://api.nuget.org/v3/index.json 2 | 3 | storage: none 4 | framework: net80, netstandard20, netstandard21 5 | 6 | # copy_local: true because this is a build-time dependency only 7 | nuget Dotnet.ReproducibleBuilds copy_local: true 8 | 9 | nuget Unquote ~> 6.1.0 10 | 11 | # NOTE re `content: false` 12 | # Workaround for malformed FSharp.Core packages https://github.com/dotnet/fsharp/issues/12706 via https://github.com/fsprojects/Paket/issues/4149 13 | # Removal triggers issues in dotnet publish, e.g. 
for Lambda projects 14 | # Also avoids Rider search finding stuff in FSharp.Core.xml 15 | nuget FSharp.Core >= 4.7.2 content: false, lowest_matching: true 16 | 17 | nuget AWSSDK.DynamoDBv2 ~> 3.7.5 18 | 19 | github eiriktsarpalis/TypeShape:10.0.0 src/TypeShape/TypeShape.fs 20 | 21 | group Test 22 | source https://api.nuget.org/v3/index.json 23 | framework: net80 24 | 25 | nuget FsCheck 26 | nuget Microsoft.NET.Test.Sdk 27 | nuget xunit 28 | nuget xunit.runner.visualstudio -------------------------------------------------------------------------------- /paket.lock: -------------------------------------------------------------------------------- 1 | STORAGE: NONE 2 | RESTRICTION: || (== net8.0) (== netstandard2.0) (== netstandard2.1) 3 | NUGET 4 | remote: https://api.nuget.org/v3/index.json 5 | AWSSDK.Core (3.7.300.11) 6 | Microsoft.Bcl.AsyncInterfaces (>= 1.1) - restriction: || (&& (== net8.0) (< netcoreapp3.1)) (== netstandard2.0) (== netstandard2.1) 7 | AWSSDK.DynamoDBv2 (3.7.300.11) 8 | AWSSDK.Core (>= 3.7.300.11 < 4.0) 9 | DotNet.ReproducibleBuilds (1.1.1) - copy_local: true 10 | Microsoft.SourceLink.AzureRepos.Git (>= 1.1.1) 11 | Microsoft.SourceLink.Bitbucket.Git (>= 1.1.1) 12 | Microsoft.SourceLink.GitHub (>= 1.1.1) 13 | Microsoft.SourceLink.GitLab (>= 1.1.1) 14 | FSharp.Core (4.7.2) 15 | Microsoft.Bcl.AsyncInterfaces (8.0) - restriction: || (&& (== net8.0) (< netcoreapp3.1)) (== netstandard2.0) (== netstandard2.1) 16 | System.Threading.Tasks.Extensions (>= 4.5.4) - restriction: || (&& (== net8.0) (>= net462)) (&& (== net8.0) (< netstandard2.1)) (== netstandard2.0) (&& (== netstandard2.1) (>= net462)) 17 | Microsoft.Build.Tasks.Git (8.0) - copy_local: true 18 | Microsoft.SourceLink.AzureRepos.Git (8.0) - copy_local: true 19 | Microsoft.Build.Tasks.Git (>= 8.0) 20 | Microsoft.SourceLink.Common (>= 8.0) 21 | Microsoft.SourceLink.Bitbucket.Git (8.0) - copy_local: true 22 | Microsoft.Build.Tasks.Git (>= 8.0) 23 | Microsoft.SourceLink.Common (>= 8.0) 24 | Microsoft.SourceLink.Common (8.0) - copy_local: true 25 | Microsoft.SourceLink.GitHub (8.0) - copy_local: true 26 | Microsoft.Build.Tasks.Git (>= 8.0) 27 | Microsoft.SourceLink.Common (>= 8.0) 28 | Microsoft.SourceLink.GitLab (8.0) - copy_local: true 29 | Microsoft.Build.Tasks.Git (>= 8.0) 30 | Microsoft.SourceLink.Common (>= 8.0) 31 | System.Runtime.CompilerServices.Unsafe (6.0) - restriction: || (&& (== net8.0) (>= net461)) (&& (== net8.0) (>= net462)) (&& (== net8.0) (< netcoreapp2.1) (< netstandard2.1)) (&& (== net8.0) (< netstandard1.0)) (&& (== net8.0) (< netstandard2.0)) (&& (== net8.0) (>= wp8)) (== netstandard2.0) (&& (== netstandard2.1) (>= net462)) 32 | System.Threading.Tasks.Extensions (4.5.4) - restriction: || (&& (== net8.0) (>= net462)) (&& (== net8.0) (< netstandard2.1)) (== netstandard2.0) (&& (== netstandard2.1) (>= net462)) 33 | System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net8.0) (>= net461)) (&& (== net8.0) (< netcoreapp2.1)) (&& (== net8.0) (< netstandard1.0)) (&& (== net8.0) (< netstandard2.0)) (&& (== net8.0) (>= wp8)) (== netstandard2.0) (== netstandard2.1) 34 | Unquote (6.1) 35 | FSharp.Core (>= 4.7.2) 36 | GITHUB 37 | remote: eiriktsarpalis/TypeShape 38 | src/TypeShape/TypeShape.fs (6e7fe07c799de723de7e4b32d64a4fd6c1697c7f) 39 | GROUP Test 40 | RESTRICTION: == net8.0 41 | NUGET 42 | remote: https://api.nuget.org/v3/index.json 43 | FsCheck (2.16.6) 44 | FSharp.Core (>= 4.2.3) 45 | FSharp.Core (8.0.100) 46 | Microsoft.CodeCoverage (17.8) 47 | Microsoft.NET.Test.Sdk (17.8) 48 | 
Microsoft.CodeCoverage (>= 17.8) 49 | Microsoft.TestPlatform.TestHost (>= 17.8) 50 | Microsoft.NETCore.Platforms (7.0.4) 51 | Microsoft.TestPlatform.ObjectModel (17.8) 52 | NuGet.Frameworks (>= 6.5) 53 | System.Reflection.Metadata (>= 1.6) 54 | Microsoft.TestPlatform.TestHost (17.8) 55 | Microsoft.TestPlatform.ObjectModel (>= 17.8) 56 | Newtonsoft.Json (>= 13.0.1) 57 | NETStandard.Library (2.0.3) 58 | Microsoft.NETCore.Platforms (>= 1.1) 59 | Newtonsoft.Json (13.0.3) 60 | NuGet.Frameworks (6.8) 61 | System.Collections.Immutable (8.0) 62 | System.Reflection.Metadata (8.0) 63 | System.Collections.Immutable (>= 8.0) 64 | xunit (2.6.2) 65 | xunit.analyzers (>= 1.6) 66 | xunit.assert (>= 2.6.2) 67 | xunit.core (2.6.2) 68 | xunit.abstractions (2.0.3) 69 | xunit.analyzers (1.6) 70 | xunit.assert (2.6.2) 71 | xunit.core (2.6.2) 72 | xunit.extensibility.core (2.6.2) 73 | xunit.extensibility.execution (2.6.2) 74 | xunit.extensibility.core (2.6.2) 75 | NETStandard.Library (>= 1.6.1) 76 | xunit.abstractions (>= 2.0.3) 77 | xunit.extensibility.execution (2.6.2) 78 | NETStandard.Library (>= 1.6.1) 79 | xunit.extensibility.core (2.6.2) 80 | xunit.runner.visualstudio (2.5.4) 81 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/AssemblyInfo.fs: -------------------------------------------------------------------------------- 1 | // Auto-Generated by FAKE; do not edit 2 | namespace System 3 | open System.Reflection 4 | open System.Runtime.CompilerServices 5 | 6 | [] 7 | [] 8 | [] 9 | [] 10 | [] 11 | [] 12 | do () 13 | 14 | module internal AssemblyVersionInformation = 15 | let [] AssemblyTitle = "FSharp.AWS.DynamoDB" 16 | let [] AssemblyProduct = "FSharp.AWS.DynamoDB" 17 | let [] AssemblyDescription = "An F# wrapper over the standard Amazon.DynamoDB library" 18 | let [] AssemblyVersion = "0.12.3" 19 | let [] AssemblyFileVersion = "0.12.3" 20 | let [] InternalsVisibleTo = "FSharp.AWS.DynamoDB.Tests" 21 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Expression/ExprCommon.fs: -------------------------------------------------------------------------------- 1 | module internal FSharp.AWS.DynamoDB.ExprCommon 2 | 3 | open System 4 | open System.Collections.Generic 5 | open System.Reflection 6 | 7 | open Microsoft.FSharp.Quotations 8 | open Microsoft.FSharp.Quotations.Patterns 9 | 10 | open Amazon.DynamoDBv2.Model 11 | 12 | open Swensen.Unquote 13 | 14 | // 15 | // Implementation of recognizers for expressions of shape 'r.A.B.[0].C' 16 | // where 'r' is an F# record. 17 | // 18 | 19 | /// Nested Attribute field id 20 | type NestedAttribute = 21 | | FField of string // nested field 22 | | FIndex of int // nested list element 23 | | FParam of vIndex: int // Attribute parameter 24 | 25 | member nf.Print() = 26 | match nf with 27 | | FParam i -> sprintf "<$param%d>" i 28 | | FField f when not <| isValidFieldName f -> sprintf "map keys must be 1 to 64k long (as utf8)." |> invalidArg f 29 | | FField f -> "." 
+ f 30 | | FIndex i -> sprintf "[%d]" i 31 | 32 | /// DynamoDB Attribute identifier 33 | type AttributeId = 34 | { RootName: string 35 | RootId: string 36 | NestedAttributes: NestedAttribute list 37 | KeySchemata: (TableKeySchema * KeyType)[] } 38 | 39 | member id.IsParametric = 40 | id.NestedAttributes 41 | |> List.exists (function 42 | | FParam _ -> true 43 | | _ -> false) 44 | 45 | member id.Id = 46 | mkString (fun append -> 47 | append id.RootId 48 | for nf in id.NestedAttributes do 49 | append <| nf.Print()) 50 | 51 | member id.Name = 52 | mkString (fun append -> 53 | append id.RootId 54 | for nf in id.NestedAttributes do 55 | append <| nf.Print()) 56 | 57 | member id.Tokens = seq { 58 | yield id.RootName 59 | yield! id.NestedAttributes |> Seq.map (fun nf -> nf.Print()) 60 | } 61 | 62 | member id.IsHashKey = 63 | List.isEmpty id.NestedAttributes 64 | && id.KeySchemata 65 | |> Array.exists (function 66 | | (_, KeyType.Hash) -> true 67 | | _ -> false) 68 | 69 | member id.IsRangeKey = 70 | List.isEmpty id.NestedAttributes 71 | && id.KeySchemata 72 | |> Array.exists (function 73 | | (_, KeyType.Range) -> true 74 | | _ -> false) 75 | 76 | member id.IsPrimaryKey = 77 | List.isEmpty id.NestedAttributes 78 | && id.KeySchemata 79 | |> Array.exists (function 80 | | ({ Type = PrimaryKey }, _) -> true 81 | | _ -> false) 82 | 83 | member id.Append nf = { id with NestedAttributes = id.NestedAttributes @ [ nf ] } 84 | member id.Apply(inputs: obj[]) = 85 | let applyField nf = 86 | match nf with 87 | | FParam i -> 88 | match inputs.[i] with 89 | | :? string as f -> FField f 90 | | :? int as i -> 91 | if i < 0 then 92 | raise <| ArgumentOutOfRangeException() 93 | else 94 | FIndex i 95 | | _ -> raise <| new InvalidCastException() 96 | | _ -> nf 97 | 98 | { id with NestedAttributes = id.NestedAttributes |> List.map applyField } 99 | 100 | static member FromKeySchema(schema: TableKeySchema) = 101 | let rootId = "#HKEY" 102 | let hkName = schema.HashKey.AttributeName 103 | { RootId = rootId 104 | RootName = hkName 105 | NestedAttributes = [] 106 | KeySchemata = [| (schema, KeyType.Hash) |] } 107 | 108 | type PropertyMetadata with 109 | 110 | /// Gets an attribute Id for given record property that 111 | /// is recognizable by DynamoDB 112 | member rp.AttrId = sprintf "#ATTR%d" rp.Index 113 | 114 | /// Represents a nested field of an F# record type 115 | type QuotedAttribute = 116 | | Root of PropertyMetadata * keyAttrs: (TableKeySchema * KeyType)[] 117 | | Nested of PropertyMetadata * parent: QuotedAttribute 118 | | Item of NestedAttribute * pickler: Pickler * parent: QuotedAttribute 119 | | Optional of pickler: Pickler * parent: QuotedAttribute 120 | 121 | /// Gets the pickler corresponding to the type pointed to by the attribute path 122 | member ap.Pickler = 123 | match ap with 124 | | Root(rp, _) -> rp.Pickler 125 | | Nested(rp, _) -> rp.Pickler 126 | | Item(_, pickler, _) -> pickler 127 | | Optional(p, _) -> p 128 | 129 | /// Gets the root record property of given attribute path 130 | member ap.RootProperty = 131 | let rec aux ap = 132 | match ap with 133 | | Root(rp, _) -> rp 134 | | Nested(_, p) -> aux p 135 | | Item(_, _, p) -> aux p 136 | | Optional(_, p) -> aux p 137 | 138 | aux ap 139 | 140 | /// Gets an attribute identifier for given Quoted attribute instace 141 | member ap.Id = 142 | let rec getTokens acc ap = 143 | match ap with 144 | | Nested(rp, p) -> getTokens (FField rp.Name :: acc) p 145 | | Item(nf, _, p) -> getTokens (nf :: acc) p 146 | | Optional(_, p) -> getTokens acc p 147 | | 
Root(rp, schema) -> 148 | { RootId = rp.AttrId 149 | RootName = rp.Name 150 | NestedAttributes = acc 151 | KeySchemata = schema } 152 | 153 | getTokens [] ap 154 | 155 | /// Iterates through all resolved picklers of a given attribute path 156 | member ap.Iter(f: Pickler -> unit) = 157 | let rec aux ap = 158 | match ap with 159 | | Root(rp, _) -> f rp.Pickler 160 | | Nested(rp, p) -> 161 | f rp.Pickler 162 | aux p 163 | | Item(_, pickler, p) -> 164 | f pickler 165 | aux p 166 | | Optional(pickler, p) -> 167 | f pickler 168 | aux p 169 | 170 | aux ap 171 | 172 | /// Attempt to extract an attribute path for given record info and expression 173 | static member TryExtract ((|PVar|_|): Expr -> int option) (record: Var) (info: RecordTableInfo) (e: Expr) = 174 | let tryGetPropInfo (properties: PropertyMetadata[]) isFinalProp (p: PropertyInfo) = 175 | match properties |> Array.tryFind (fun rp -> rp.PropertyInfo = p) with 176 | | None -> None 177 | | Some rp when rp.Pickler.PicklerType = PicklerType.Serialized && not isFinalProp -> 178 | invalidArg "expr" "cannot access nested properties of serialized fields." 179 | | Some rp when rp.Pickler.PicklerType = PicklerType.Union && not isFinalProp -> 180 | invalidArg "expr" "cannot access nested properties of union fields." 181 | | Some _ as r -> r 182 | 183 | let rec extractProps props e = 184 | match e with 185 | | PropertyGet(Some(Var r'), p, []) when record = r' -> 186 | match tryGetPropInfo info.Properties (List.isEmpty props) p with 187 | | None -> None 188 | | Some rp -> 189 | let root = Root(rp, info.GetPropertySchemata rp.Name) 190 | mkAttrPath root rp.NestedRecord props 191 | 192 | | SpecificProperty <@ fun (t: _ option) -> t.Value @> (Some e, [ et ], _) -> extractProps (Choice2Of3 et :: props) e 193 | 194 | | SpecificProperty <@ fun (r: _ ref) -> r.Value @> (Some e, _, _) -> 195 | let p = e.Type.GetProperty("contents") 196 | extractProps (Choice1Of3 p :: props) e 197 | 198 | | PropertyGet(Some e, p, []) -> extractProps (Choice1Of3 p :: props) e 199 | 200 | | SpecificCall2 <@ fst @> (None, _, _, [ e ]) -> 201 | let p = e.Type.GetProperty("Item1") 202 | extractProps (Choice1Of3 p :: props) e 203 | 204 | | SpecificCall2 <@ snd @> (None, _, _, [ e ]) -> 205 | let p = e.Type.GetProperty("Item2") 206 | extractProps (Choice1Of3 p :: props) e 207 | 208 | | SpecificCall2 <@ Option.get @> (None, _, [ et ], [ e ]) -> extractProps (Choice2Of3 et :: props) e 209 | 210 | | IndexGet(e, et, i) -> extractProps (Choice3Of3(et, i) :: props) e 211 | 212 | | _ -> None 213 | 214 | and mkAttrPath acc (ctx: PropertyMetadata[] option) rest = 215 | match rest, ctx with 216 | | [], _ -> Some acc 217 | | Choice1Of3 p :: tail, Some rI -> 218 | match tryGetPropInfo rI (List.isEmpty tail) p with 219 | | None -> None 220 | | Some rp -> mkAttrPath (Nested(rp, acc)) rp.NestedRecord tail 221 | 222 | | Choice2Of3 opt :: tail, None -> 223 | let pickler = Pickler.resolveUntyped opt 224 | mkAttrPath (Optional(pickler, acc)) ctx tail 225 | 226 | | Choice3Of3(et, ie) :: tail, None -> 227 | let pickler = Pickler.resolveUntyped et 228 | let ctx = 229 | match box pickler with 230 | | :? IRecordPickler as rc -> Some rc.Properties 231 | | _ -> None 232 | let inline mkAttrPath indexV = mkAttrPath (Item(indexV, pickler, acc)) ctx tail 233 | match ie with 234 | | _ when ie.IsClosed -> 235 | match evalRaw ie: obj with 236 | | :? int as i -> 237 | if i < 0 then 238 | raise <| ArgumentOutOfRangeException() 239 | else 240 | mkAttrPath (FIndex i) 241 | | :? 
string as f -> mkAttrPath (FField f) 242 | | _ -> None 243 | | PVar i -> mkAttrPath (FParam i) 244 | | _ -> None 245 | 246 | | _ -> None 247 | 248 | extractProps [] e 249 | 250 | /// Wrapper API for writing attribute names and values for Dynamo query expressions 251 | type AttributeWriter(names: Dictionary, values: Dictionary) = 252 | static let cmp = new AttributeValueComparer() 253 | let vcontents = new Dictionary(cmp) 254 | 255 | new() = new AttributeWriter(new Dictionary<_, _>(), new Dictionary<_, _>()) 256 | 257 | member __.Names = names 258 | member __.Values = values 259 | 260 | member __.WriteValue(av: AttributeValue) = 261 | let ok, found = vcontents.TryGetValue av 262 | if ok then 263 | found 264 | else 265 | let id = sprintf ":val%d" values.Count 266 | vcontents.Add(av, id) 267 | values.Add(id, av) 268 | id 269 | 270 | member __.WriteAttibute(attr: AttributeId) = 271 | names.[attr.RootId] <- attr.RootName 272 | attr.Id 273 | 274 | /// Recognizes exprs of shape <@ fun p1 p2 ... -> body @> 275 | let extractExprParams (recordInfo: RecordTableInfo) (expr: Expr) = 276 | let vars = new Dictionary() 277 | let rec aux i expr = 278 | match expr with 279 | | Lambda(v, body) when v.Type <> recordInfo.Type -> 280 | vars.Add(v, i) 281 | aux (i + 1) body 282 | | _ -> expr 283 | 284 | let expr' = aux 0 expr 285 | let tryFindIndex e = 286 | match e with 287 | | Var v -> 288 | let ok, i = vars.TryGetValue v 289 | if ok then Some i else None 290 | | _ -> None 291 | 292 | vars.Count, tryFindIndex, expr' 293 | 294 | // Detects conflicts in a collection of attribute paths 295 | // e.g. 'r.Foo.Bar.[0]' and 'r.Foo' are conflicting 296 | // however 'r.Foo.Bar.[0]' and 'r.Foo.Bar.[1]' are not conflicting 297 | type private AttributeNode = { Value: string; Children: ResizeArray } 298 | /// Detects conflicts in a collection of attribute paths 299 | let tryFindConflictingPaths (attrs: seq) = 300 | let root = new ResizeArray() 301 | let tryAppendPath (attr: AttributeId) = 302 | let tokens = attr.Tokens 303 | let enum = tokens.GetEnumerator() 304 | let mutable ctx = root 305 | let mutable isNodeAdded = false 306 | let mutable isLeafFound = false 307 | let acc = new ResizeArray<_>() 308 | while not isLeafFound && enum.MoveNext() do 309 | let t = enum.Current 310 | let child = 311 | match ctx.FindIndex(fun n -> n.Value = t) with 312 | | -1 -> 313 | isNodeAdded <- true 314 | let ch = { Value = t; Children = new ResizeArray<_>() } 315 | ctx.Add ch 316 | ch 317 | 318 | | i -> 319 | let ch = ctx.[i] 320 | if ch.Children.Count = 0 then 321 | isLeafFound <- true 322 | ch 323 | 324 | acc.Add t 325 | ctx <- child.Children 326 | 327 | let concat xs = String.concat "" xs 328 | if isLeafFound then 329 | Some(concat tokens, concat acc) 330 | elif not isNodeAdded then 331 | while ctx.Count > 0 do 332 | let ch = ctx.[0] 333 | acc.Add ch.Value 334 | ctx <- ch.Children 335 | 336 | Some(concat tokens, concat acc) 337 | 338 | else 339 | None 340 | 341 | attrs |> Seq.tryPick tryAppendPath 342 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Expression/ExpressionContainers.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB 2 | 3 | open System 4 | 5 | open FSharp.AWS.DynamoDB.ExprCommon 6 | open FSharp.AWS.DynamoDB.ConditionalExpr 7 | open FSharp.AWS.DynamoDB.UpdateExpr 8 | open FSharp.AWS.DynamoDB.ProjectionExpr 9 | 10 | // 11 | // Public converted condition expression wrapper implementations 12 | // 13 | 
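// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the library source): shows how the
// precomputed wrappers defined below are typically obtained and combined.
// The record type, its fields and the sample values are assumptions made for
// the example only; RecordTemplate.Define, PrecomputeConditionalExpr and
// ConditionExpression.And are the pieces taken from this codebase.
//
//     open FSharp.AWS.DynamoDB
//
//     type Order = { [<HashKey>] Id: string; Amount: int }
//
//     let template = RecordTemplate.Define<Order>()
//     let c1 = template.PrecomputeConditionalExpr <@ fun o -> o.Amount > 10 @>
//     let c2 = template.PrecomputeConditionalExpr <@ fun o -> o.Id = "order-1" @>
//     let c  = ConditionExpression.And(c1, c2)   // combinators defined below
// ---------------------------------------------------------------------------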
14 | /// Represents a condition expression for a given record type 15 | [] 16 | type ConditionExpression<'TRecord> internal (cond: ConditionalExpression) = 17 | let data = lazy (cond.GetDebugData()) 18 | /// Gets whether given conditional is a valid key condition 19 | member __.IsKeyConditionCompatible = cond.IsKeyConditionCompatible 20 | /// Gets the infered local secondary index for the query, if applicable 21 | member __.KeyCondition = cond.KeyCondition 22 | /// Secondary index name for condition expression, if applicable 23 | member __.IndexName = cond.IndexName 24 | 25 | /// Internal condition expression object 26 | member internal __.Conditional = cond 27 | /// DynamoDB condition expression string 28 | member __.Expression = let expr, _, _ = data.Value in expr 29 | /// DynamoDB attribute names 30 | member __.Names = let _, names, _ = data.Value in names 31 | /// DynamoDB attribute values 32 | member __.Values = let _, _, values = data.Value in values 33 | 34 | override __.Equals(other: obj) = 35 | match other with 36 | | :? ConditionExpression<'TRecord> as other -> cond.QueryExpr = other.Conditional.QueryExpr 37 | | _ -> false 38 | 39 | override __.GetHashCode() = hash cond.QueryExpr 40 | 41 | type ConditionExpression = 42 | 43 | /// 44 | /// Applies the AND operation on two conditionals 45 | /// 46 | static member And(left: ConditionExpression<'TRecord>, right: ConditionExpression<'TRecord>) = 47 | let qExpr = QueryExpr.EAnd left.Conditional.QueryExpr right.Conditional.QueryExpr 48 | ensureNotTautological qExpr 49 | new ConditionExpression<'TRecord>({ QueryExpr = qExpr; KeyCondition = extractKeyCondition qExpr; NParams = 0 }) 50 | 51 | /// 52 | /// Applies the OR operation on two conditionals 53 | /// 54 | static member Or(left: ConditionExpression<'TRecord>, right: ConditionExpression<'TRecord>) = 55 | let qExpr = QueryExpr.EOr left.Conditional.QueryExpr right.Conditional.QueryExpr 56 | ensureNotTautological qExpr 57 | new ConditionExpression<'TRecord>({ QueryExpr = qExpr; KeyCondition = extractKeyCondition qExpr; NParams = 0 }) 58 | 59 | /// 60 | /// Applies the NOT operation on a conditional 61 | /// 62 | static member Not(conditional: ConditionExpression<'TRecord>) = 63 | let qExpr = QueryExpr.ENot conditional.Conditional.QueryExpr 64 | ensureNotTautological qExpr 65 | new ConditionExpression<'TRecord>({ QueryExpr = qExpr; KeyCondition = extractKeyCondition qExpr; NParams = 0 }) 66 | 67 | /// Represents an update expression for a given record type 68 | [] 69 | type UpdateExpression<'TRecord> internal (updateOps: UpdateOperations) = 70 | let data = lazy (updateOps.GetDebugData()) 71 | /// Internal update expression object 72 | member internal __.UpdateOps = updateOps 73 | /// DynamoDB update expression string 74 | member __.Expression = let expr, _, _ = data.Value in expr 75 | /// DynamoDB attribute names 76 | member __.Names = let _, names, _ = data.Value in names 77 | /// DynamoDB attribute values 78 | member __.Values = let _, _, values = data.Value in values 79 | 80 | override __.Equals(other: obj) = 81 | match other with 82 | | :? UpdateExpression<'TRecord> as other -> updateOps.UpdateOps = other.UpdateOps.UpdateOps 83 | | _ -> false 84 | 85 | override __.GetHashCode() = hash updateOps.UpdateOps 86 | 87 | static member (&&&)(this: UpdateExpression<'TRecord>, that: UpdateExpression<'TRecord>) = UpdateExpression.Combine(this, that) 88 | 89 | and UpdateExpression = 90 | /// Combines a collection of compatible update expressions into a single expression. 
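/// Illustrative sketch (the record type, its fields and the update quotations are
/// assumptions for the example; template refers to the helper in Extensions.fs):
///     let incr = template.PrecomputeUpdateExpr <@ fun (o: Order) -> { o with Amount = o.Amount + 1 } @>
///     let note = template.PrecomputeUpdateExpr <@ fun (o: Order) -> { o with Comment = Some "bulk update" } @>
///     let combined = UpdateExpression.Combine(incr, note)   // equivalent to incr &&& note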
91 | static member Combine([] exprs: UpdateExpression<'TRecord>[]) = 92 | match exprs with 93 | | [||] -> invalidArg "exprs" "must specify at least one update expression." 94 | | [| expr |] -> expr 95 | | _ -> 96 | 97 | let uops = exprs |> Array.collect (fun e -> e.UpdateOps.UpdateOps) 98 | match uops |> Seq.map (fun o -> o.Attribute) |> tryFindConflictingPaths with 99 | | None -> () 100 | | Some(p1, p2) -> 101 | let msg = sprintf "found conflicting paths '%s' and '%s' being accessed in update expression." p1 p2 102 | invalidArg "expr" msg 103 | 104 | new UpdateExpression<'TRecord>({ UpdateOps = uops; NParams = 0 }) 105 | 106 | /// Represents a projection expression for a given record type 107 | [] 108 | type ProjectionExpression<'TRecord, 'TProjection> internal (expr: ProjectionExpr) = 109 | let data = lazy (expr.GetDebugData()) 110 | /// Internal projection expression object 111 | member internal __.ProjectionExpr = expr 112 | /// DynamoDB projection expression string 113 | member __.Expression = let expr, _ = data.Value in expr 114 | /// DynamoDB attribute names 115 | member __.Names = let _, names = data.Value in names 116 | 117 | member internal __.UnPickle(ro: RestObject) = expr.Ctor ro :?> 'TProjection 118 | 119 | override __.Equals(other: obj) = 120 | match other with 121 | | :? ProjectionExpression<'TRecord, 'TProjection> as other -> expr.Attributes = other.ProjectionExpr.Attributes 122 | | _ -> false 123 | 124 | override __.GetHashCode() = hash expr.Attributes 125 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Expression/ProjectionExpr.fs: -------------------------------------------------------------------------------- 1 | module internal FSharp.AWS.DynamoDB.ProjectionExpr 2 | 3 | open System 4 | open System.Collections.Generic 5 | 6 | open Microsoft.FSharp.Reflection 7 | open Microsoft.FSharp.Quotations 8 | open Microsoft.FSharp.Quotations.Patterns 9 | 10 | open Amazon.DynamoDBv2.Model 11 | 12 | open FSharp.AWS.DynamoDB.ExprCommon 13 | 14 | /////////////////////////////// 15 | // 16 | // Extracts projection expressions from an F# quotation of the form 17 | // <@ fun record -> record.A, record.B, record.B.[0].C @> 18 | // 19 | // c.f. http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html 20 | // 21 | /////////////////////////////// 22 | 23 | type AttributeId with 24 | 25 | member id.View(ro: RestObject, value: byref) : bool = 26 | let notFound rest = 27 | // only raise if the last component of the document path is missing 28 | if List.isEmpty rest then 29 | false 30 | else 31 | sprintf "Document path '%s' not found." id.Name |> KeyNotFoundException |> raise 32 | 33 | let rec aux result rest (av: AttributeValue) = 34 | match rest with 35 | | [] -> 36 | result := av 37 | true 38 | | FField f :: tl -> 39 | if av.IsMSet then 40 | let ok, nested = av.M.TryGetValue f 41 | if not ok then notFound tl else aux result tl nested 42 | else 43 | av.Print() |> sprintf "Expected map, but was '%s'." |> InvalidCastException |> raise 44 | 45 | | FIndex i :: tl -> 46 | if av.IsLSet then 47 | if i < 0 || i >= av.L.Count then 48 | sprintf "Indexed path '%s' out of range." id.Name |> ArgumentOutOfRangeException |> raise 49 | else 50 | aux result tl av.L.[i] 51 | else 52 | av.Print() |> sprintf "Expected list, but was '%s'." |> InvalidCastException |> raise 53 | 54 | | FParam _ :: _ -> sprintf "internal error; unexpected attribute path '%s'." 
id.Name |> invalidOp 55 | 56 | let ok, prop = ro.TryGetValue id.RootName 57 | if ok then 58 | let cell = ref null 59 | if aux cell id.NestedAttributes prop then 60 | value <- cell.Value 61 | true 62 | else 63 | false 64 | else 65 | notFound id.NestedAttributes 66 | 67 | type ProjectionExpr = 68 | { Attributes: AttributeId[] 69 | Ctor: Dictionary -> obj } 70 | 71 | 72 | static member Extract (recordInfo: RecordTableInfo) (expr: Expr<'TRecord -> 'Tuple>) = 73 | let invalidExpr () = invalidArg "expr" "supplied expression is not a valid projection." 74 | match expr with 75 | | Lambda(r, body) when r.Type = recordInfo.Type -> 76 | let (|AttributeGet|_|) expr = QuotedAttribute.TryExtract (fun _ -> None) r recordInfo expr 77 | 78 | let (|Ignore|_|) e = 79 | match e with 80 | | Value(null, t) when t = typeof -> Some() 81 | | SpecificCall2 <@ ignore @> _ -> Some() 82 | | _ -> None 83 | 84 | match body with 85 | | Ignore -> 86 | let attr = AttributeId.FromKeySchema recordInfo.PrimaryKeySchema 87 | { Attributes = [| attr |]; Ctor = fun _ -> box () } 88 | 89 | | AttributeGet qa -> 90 | let pickler = qa.Pickler 91 | let attr = qa.Id 92 | let ctor (ro: RestObject) = 93 | let mutable av = null 94 | let ok = attr.View(ro, &av) 95 | if ok then 96 | pickler.UnPickleUntyped av 97 | else 98 | pickler.DefaultValueUntyped 99 | 100 | { Attributes = [| attr |]; Ctor = ctor } 101 | 102 | | NewTuple values -> 103 | let qAttrs = 104 | values 105 | |> Seq.map (function 106 | | AttributeGet qa -> qa 107 | | _ -> invalidExpr ()) 108 | |> Seq.toArray 109 | 110 | let attrs = qAttrs |> Array.map (fun qa -> qa.Id) 111 | let picklers = qAttrs |> Array.map (fun qa -> qa.Pickler) 112 | 113 | // check for conflicting projection attributes 114 | match tryFindConflictingPaths attrs with 115 | | Some(p1, p2) -> 116 | let msg = sprintf "found conflicting paths '%s' and '%s' being accessed in projection expression." p1 p2 117 | invalidArg "expr" msg 118 | | None -> () 119 | 120 | let tupleCtor = FSharpValue.PreComputeTupleConstructor typeof<'Tuple> 121 | 122 | let ctor (ro: RestObject) = 123 | let values = Array.zeroCreate attrs.Length 124 | for i = 0 to attrs.Length - 1 do 125 | let mutable av = null 126 | let ok = attrs.[i].View(ro, &av) 127 | if ok then 128 | values.[i] <- picklers.[i].UnPickleUntyped av 129 | else 130 | values.[i] <- picklers.[i].DefaultValueUntyped 131 | 132 | tupleCtor values 133 | 134 | { Attributes = attrs; Ctor = ctor } 135 | 136 | | _ -> invalidArg "expr" "projection type must either be a single property, or tuple of properties." 137 | | _ -> invalidExpr () 138 | 139 | member __.Write(writer: AttributeWriter) = 140 | let sb = new System.Text.StringBuilder() 141 | let inline (!) (x: string) = sb.Append x |> ignore 142 | let mutable isFirst = true 143 | for attr in __.Attributes do 144 | if isFirst then isFirst <- false else ! 
", " 145 | 146 | !(writer.WriteAttibute attr) 147 | 148 | sb.ToString() 149 | 150 | member __.GetDebugData() = 151 | let aw = new AttributeWriter() 152 | let expr = __.Write(aw) 153 | let names = aw.Names |> Seq.map (fun kv -> kv.Key, kv.Value) |> Seq.toList 154 | expr, names 155 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Extensions.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB 2 | 3 | open System 4 | open System.IO 5 | open System.Text.RegularExpressions 6 | 7 | open Amazon.Runtime 8 | 9 | open Microsoft.FSharp.Quotations 10 | open Amazon.Runtime.CredentialManagement 11 | 12 | /// Collection of extensions for the public API 13 | [] 14 | module Extensions = 15 | 16 | /// Precomputes a template expression 17 | let inline template<'TRecord> = RecordTemplate.Define<'TRecord>() 18 | 19 | /// A conditional which verifies that given item exists 20 | let inline itemExists<'TRecord> = template<'TRecord>.ItemExists 21 | /// A conditional which verifies that given item does not exist 22 | let inline itemDoesNotExist<'TRecord> = template<'TRecord>.ItemDoesNotExist 23 | 24 | /// Precomputes a conditional expression 25 | let inline cond (expr: Expr<'TRecord -> bool>) : ConditionExpression<'TRecord> = template<'TRecord>.PrecomputeConditionalExpr expr 26 | 27 | /// Precomputes an update expression 28 | let inline update (expr: Expr<'TRecord -> 'TRecord>) : UpdateExpression<'TRecord> = template<'TRecord>.PrecomputeUpdateExpr expr 29 | 30 | /// Precomputes an update operation expression 31 | let inline updateOp (expr: Expr<'TRecord -> UpdateOp>) : UpdateExpression<'TRecord> = template<'TRecord>.PrecomputeUpdateExpr expr 32 | 33 | /// Precomputes a projection expression 34 | let inline proj (expr: Expr<'TRecord -> 'TProjection>) : ProjectionExpression<'TRecord, 'TProjection> = 35 | template<'TRecord>.PrecomputeProjectionExpr<'TProjection> expr 36 | 37 | 38 | // simple recognizer for aws credentials file syntax 39 | // c.f. http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html 40 | let private profileRegex = Regex("\[(\S+)\]\s+aws_access_key_id\s*=\s*(\S+)\s+aws_secret_access_key\s*=\s*(\S+)", RegexOptions.Compiled) 41 | 42 | type AWSCredentials with 43 | 44 | /// 45 | /// Recovers a credentials instance from the local environment 46 | /// using the the 'AWS_ACCESS_KEY_ID' and 'AWS_SECRET_ACCESS_KEY' variables. 47 | /// 48 | static member FromEnvironmentVariables() : Amazon.Runtime.AWSCredentials = 49 | let accessKeyName = "AWS_ACCESS_KEY_ID" 50 | let secretKeyName = "AWS_SECRET_ACCESS_KEY" 51 | let getEnv x = Environment.ResolveEnvironmentVariable x 52 | 53 | match getEnv accessKeyName, getEnv secretKeyName with 54 | | null, null -> sprintf "Undefined environment variables '%s' and '%s'" accessKeyName secretKeyName |> invalidOp 55 | | null, _ -> sprintf "Undefined environment variable '%s'" accessKeyName |> invalidOp 56 | | _, null -> sprintf "Undefined environment variable '%s'" secretKeyName |> invalidOp 57 | | aK, sK -> new BasicAWSCredentials(aK, sK) :> _ 58 | 59 | /// 60 | /// Recover a set of credentials using the local credentials store. 61 | /// 62 | /// Credential store profile name. Defaults to 'default' profile. 
63 | static member FromCredentialsStore(?profileName: string) : Amazon.Runtime.AWSCredentials = 64 | let credentialProfileStoreChain = new CredentialProfileStoreChain() 65 | let profileName = defaultArg profileName "default" 66 | let ok, creds = credentialProfileStoreChain.TryGetAWSCredentials(profileName) 67 | if ok then 68 | creds 69 | else 70 | let credsFile = Path.Combine(getHomePath (), ".aws", "credentials") 71 | if not <| File.Exists credsFile then 72 | sprintf "Could not locate stored credentials profile '%s'." profileName |> invalidOp 73 | 74 | let text = File.ReadAllText credsFile 75 | 76 | let matchingProfile = 77 | profileRegex.Matches text 78 | |> Seq.cast 79 | |> Seq.map (fun m -> m.Groups.[1].Value, m.Groups.[2].Value, m.Groups.[3].Value) 80 | |> Seq.tryFind (fun (pf, _, _) -> pf = profileName) 81 | 82 | match matchingProfile with 83 | | None -> sprintf "Could not locate stored credentials profile '%s'." profileName |> invalidOp 84 | | Some(_, aK, sK) -> new BasicAWSCredentials(aK, sK) :> _ 85 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/FSharp.AWS.DynamoDB.fsproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | netstandard2.0 5 | ..\..\bin\ 6 | true 7 | An idiomatic F# wrapper for the AWS DynamoDB SDK. 8 | Eirik Tsarpalis 9 | Copyright 2016 10 | MIT 11 | https://github.com/fsprojects/FSharp.AWS.DynamoDB 12 | https://avatars0.githubusercontent.com/u/6001315 13 | fsharp, f#, aws, amazon, dynamodb 14 | 15 | 16 | 17 | 18 | True 19 | TypeShape/TypeShape.fs 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/CollectionPicklers.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.CollectionPicklers 3 | 4 | open System.IO 5 | 6 | open Amazon.DynamoDBv2.Model 7 | 8 | open FSharp.AWS.DynamoDB 9 | 10 | // 11 | // Pickler implementations for collection types 12 | // 13 | 14 | type ListPickler<'List, 'T when 'List :> seq<'T>>(ctor: seq<'T> -> 'List, nullV: 'List, tp: Pickler<'T>) = 15 | inherit Pickler<'List>() 16 | override __.PickleType = PickleType.List 17 | override __.PicklerType = PicklerType.Value 18 | override __.DefaultValue = ctor [||] 19 | override __.PickleCoerced obj = 20 | match obj with 21 | | null -> Some <| AttributeValue(NULL = true) 22 | | :? 'T as t -> 23 | match tp.Pickle t with 24 | | None -> None 25 | | Some av -> Some <| AttributeValue(L = rlist [| av |]) 26 | | _ -> 27 | let rl = unbox> obj |> Seq.choose tp.Pickle |> rlist 28 | if rl.Count = 0 then 29 | None 30 | else 31 | Some <| AttributeValue(L = rl) 32 | 33 | override __.Pickle list = __.PickleCoerced list 34 | 35 | override __.UnPickle a = 36 | if a.NULL then nullV 37 | elif a.IsLSet then a.L |> Seq.map tp.UnPickle |> ctor 38 | else invalidCast a 39 | 40 | interface ICollectionPickler with 41 | member __.ElementPickler = tp :> _ 42 | 43 | 44 | 45 | type BytesSetPickler() = 46 | inherit Pickler>() 47 | override __.PickleType = PickleType.BytesSet 48 | override __.PicklerType = PicklerType.Value 49 | override __.DefaultValue = Set.empty 50 | override __.PickleCoerced obj = 51 | match obj with 52 | | null -> Some <| AttributeValue(NULL = true) 53 | | :? 
(byte[]) as bs -> 54 | if bs.Length = 0 then 55 | None 56 | else 57 | Some <| AttributeValue(BS = rlist [| new MemoryStream(bs) |]) 58 | 59 | | _ -> 60 | let rl = 61 | unbox> obj 62 | |> Seq.choose (fun bs -> if bs.Length = 0 then None else Some(new MemoryStream(bs))) 63 | |> rlist 64 | 65 | if rl.Count = 0 then 66 | None 67 | else 68 | Some <| AttributeValue(BS = rl) 69 | 70 | override __.Pickle bss = __.PickleCoerced bss 71 | 72 | override __.UnPickle a = 73 | if a.NULL then 74 | Set.empty 75 | elif a.IsBSSet then 76 | a.BS |> Seq.map (fun ms -> ms.ToArray()) |> set 77 | else 78 | invalidCast a 79 | 80 | interface ICollectionPickler with 81 | member __.ElementPickler = new ByteArrayPickler() :> _ 82 | 83 | 84 | 85 | type NumSetPickler<'T when 'T: comparison>(tp: NumRepresentablePickler<'T>) = 86 | inherit Pickler>() 87 | override __.DefaultValue = Set.empty 88 | override __.PickleType = PickleType.NumberSet 89 | override __.PicklerType = PicklerType.Value 90 | override __.PickleCoerced obj = 91 | match obj with 92 | | null -> Some <| AttributeValue(NULL = true) 93 | | :? 'T as t -> Some <| AttributeValue(NS = rlist [| tp.UnParse t |]) 94 | | _ -> 95 | let rl = obj |> unbox> |> Seq.map tp.UnParse |> rlist 96 | if rl.Count = 0 then 97 | None 98 | else 99 | Some <| AttributeValue(NS = rl) 100 | 101 | override __.Pickle set = __.PickleCoerced set 102 | 103 | override __.UnPickle a = 104 | if a.NULL then Set.empty 105 | elif a.IsNSSet then a.NS |> Seq.map tp.Parse |> set 106 | else invalidCast a 107 | 108 | interface ICollectionPickler with 109 | member __.ElementPickler = tp :> _ 110 | 111 | 112 | 113 | type StringSetPickler<'T when 'T: comparison>(tp: StringRepresentablePickler<'T>) = 114 | inherit Pickler>() 115 | override __.DefaultValue = Set.empty 116 | override __.PickleType = PickleType.StringSet 117 | override __.PicklerType = PicklerType.Value 118 | override __.PickleCoerced obj = 119 | match obj with 120 | | null -> AttributeValue(NULL = true) |> Some 121 | | :? 'T as t -> AttributeValue(SS = rlist [| tp.UnParse t |]) |> Some 122 | | _ -> 123 | let rl = obj |> unbox> |> Seq.map tp.UnParse |> rlist 124 | if rl.Count = 0 then 125 | None 126 | else 127 | AttributeValue(SS = rl) |> Some 128 | 129 | override __.Pickle set = __.PickleCoerced set 130 | 131 | override __.UnPickle a = 132 | if a.NULL then Set.empty 133 | elif a.IsSSSet then a.SS |> Seq.map tp.Parse |> set 134 | else invalidCast a 135 | 136 | interface ICollectionPickler with 137 | member __.ElementPickler = tp :> _ 138 | 139 | let mkSetPickler<'T when 'T: comparison> (tp: Pickler<'T>) : Pickler> = 140 | if typeof<'T> = typeof then 141 | BytesSetPickler() |> unbox 142 | else 143 | match tp with 144 | | :? NumRepresentablePickler<'T> as tc -> NumSetPickler<'T>(tc) :> _ 145 | | :? StringRepresentablePickler<'T> as tc -> StringSetPickler<'T>(tc) :> _ 146 | | _ -> UnSupportedType.Raise typeof> 147 | 148 | 149 | 150 | type MapPickler<'Value>(vp: Pickler<'Value>) = 151 | inherit Pickler>() 152 | override __.PickleType = PickleType.Map 153 | override __.PicklerType = PicklerType.Value 154 | override __.DefaultValue = Map.empty 155 | override __.Pickle map = 156 | if isNull map then 157 | AttributeValue(NULL = true) |> Some 158 | elif map.Count = 0 then 159 | None 160 | else 161 | let m = 162 | map 163 | |> Seq.choose (fun kv -> 164 | if not <| isValidFieldName kv.Key then 165 | let msg = sprintf "unsupported key name '%s'. should be 1 to 64k long (as utf8)." 
kv.Key 166 | invalidArg "map" msg 167 | 168 | match vp.Pickle kv.Value with 169 | | None -> None 170 | | Some av -> Some(keyVal kv.Key av)) 171 | |> cdict 172 | 173 | if m.Count = 0 then 174 | None 175 | else 176 | 177 | AttributeValue(M = m) |> Some 178 | 179 | 180 | override __.UnPickle a = 181 | if a.NULL then 182 | Map.empty 183 | elif a.IsMSet then 184 | a.M |> Seq.map (fun kv -> kv.Key, vp.UnPickle kv.Value) |> Map.ofSeq 185 | else 186 | invalidCast a 187 | 188 | interface ICollectionPickler with 189 | member __.ElementPickler = vp :> _ 190 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/Pickler.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.Pickler 3 | 4 | open System 5 | open System.Collections.Generic 6 | 7 | open Amazon.DynamoDBv2.Model 8 | 9 | open FSharp.AWS.DynamoDB 10 | 11 | type RestObject = Dictionary 12 | 13 | /// Pickle representation type in an AttributeValue instance 14 | type PickleType = 15 | | Number = 01 16 | | String = 02 17 | | Bool = 03 18 | | Bytes = 04 19 | | StringSet = 05 20 | | NumberSet = 06 21 | | BytesSet = 07 22 | | List = 08 23 | | Map = 09 24 | 25 | /// Pickler implementation type 26 | type PicklerType = 27 | | Value = 01 28 | | Wrapper = 02 29 | | Record = 03 30 | | Union = 04 31 | | Serialized = 05 32 | | Enum = 06 33 | 34 | /// Untyped pickler base class 35 | [] 36 | type Pickler() = 37 | /// Type that the given instance is responsible for pickling 38 | abstract Type: Type 39 | /// Type used for pickling value in DynamoDB 40 | abstract PickleType: PickleType 41 | /// Type of pickler that was generated 42 | abstract PicklerType: PicklerType 43 | 44 | /// Default value to be used in case of missing attribute in table 45 | abstract DefaultValueUntyped: obj 46 | /// Pickle value to AttributeValue instance, if applicable 47 | abstract PickleUntyped: obj -> AttributeValue option 48 | /// UnPickle value from AttributeValue instance 49 | abstract UnPickleUntyped: AttributeValue -> obj 50 | 51 | /// Pickle any object, making an effort to coerce it to current pickler type 52 | abstract PickleCoerced: obj -> AttributeValue option 53 | default __.PickleCoerced obj = __.PickleUntyped obj 54 | 55 | /// True if DynamoDB representation preserves 56 | /// comparison semantics for query expressions 57 | abstract IsComparable: bool 58 | default __.IsComparable = false 59 | 60 | /// True if scalar DynamoDB instance 61 | member __.IsScalar = 62 | match __.PickleType with 63 | | PickleType.Number 64 | | PickleType.String 65 | | PickleType.Bytes 66 | | PickleType.Bool -> true 67 | | _ -> false 68 | 69 | override p.Equals o = 70 | match o with 71 | | :? 
Pickler as p' -> p.Type = p'.Type && p.PicklerType = p'.PicklerType 72 | | _ -> false 73 | 74 | override p.GetHashCode() = hash2 p.Type p.PicklerType 75 | 76 | /// Typed pickler base class 77 | [] 78 | type Pickler<'T>() = 79 | inherit Pickler() 80 | 81 | /// Default value to be used in case of missing attribute in table 82 | abstract DefaultValue: 'T 83 | /// Pickle value to AttributeValue instance, if applicable 84 | abstract Pickle: 'T -> AttributeValue option 85 | /// UnPickle value from AttributeValue instance 86 | abstract UnPickle: AttributeValue -> 'T 87 | 88 | override __.Type = typeof<'T> 89 | override __.DefaultValueUntyped = __.DefaultValue :> obj 90 | override __.PickleUntyped o = __.Pickle(o :?> 'T) 91 | override __.UnPickleUntyped av = __.UnPickle av :> obj 92 | 93 | /// Represent a pickler instance that can naturally represent 94 | /// its values as strings. E.g. Guid, DateTimeOffset, number types. 95 | [] 96 | type StringRepresentablePickler<'T>() = 97 | inherit Pickler<'T>() 98 | abstract Parse: string -> 'T 99 | abstract UnParse: 'T -> string 100 | 101 | /// Represent a pickler instance that can naturally represent 102 | /// its values as numbers. E.g. numbers & enumerations. 103 | [] 104 | type NumRepresentablePickler<'T>() = 105 | inherit StringRepresentablePickler<'T>() 106 | 107 | /// Picklers of collections should implement this interface 108 | type ICollectionPickler = 109 | abstract ElementPickler: Pickler 110 | 111 | /// Interface used for generating combined picklers 112 | type IPicklerResolver = 113 | abstract Resolve: Type -> Pickler 114 | abstract Resolve<'T> : unit -> Pickler<'T> 115 | 116 | 117 | // 118 | // Common pickler utilities 119 | // 120 | 121 | let inline invalidCast (av: AttributeValue) : 'T = 122 | let msg = sprintf "could not convert value %A to type '%O'" (av.Print()) typeof<'T> 123 | raise <| new InvalidCastException(msg) 124 | 125 | let getElemPickler (pickler: Pickler) = (unbox pickler).ElementPickler 126 | 127 | type UnSupportedType = 128 | static member Raise(fieldType: Type, ?reason: string) = 129 | let message = 130 | match reason with 131 | | None -> sprintf "unsupported record field type '%O'" fieldType 132 | | Some r -> sprintf "unsupported record field type '%O': %s" fieldType r 133 | 134 | raise <| new ArgumentException(message) 135 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/PicklerResolver.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB 2 | 3 | open System 4 | open System.Collections.Concurrent 5 | open System.Collections.Generic 6 | 7 | open TypeShape 8 | 9 | // 10 | // Pickler resolution implementation 11 | // 12 | 13 | [] 14 | module private ResolverImpl = 15 | 16 | let resolvePickler (resolver: IPicklerResolver) (t: Type) : Pickler = 17 | match TypeShape.Create t with 18 | | Shape.Bool -> BoolPickler() :> _ 19 | | Shape.Byte -> mkNumericalPickler () :> _ 20 | | Shape.SByte -> mkNumericalPickler () :> _ 21 | | Shape.Int16 -> mkNumericalPickler () :> _ 22 | | Shape.Int32 -> mkNumericalPickler () :> _ 23 | | Shape.Int64 -> mkNumericalPickler () :> _ 24 | | Shape.UInt16 -> mkNumericalPickler () :> _ 25 | | Shape.UInt32 -> mkNumericalPickler () :> _ 26 | | Shape.UInt64 -> mkNumericalPickler () :> _ 27 | | Shape.Single -> mkNumericalPickler () :> _ 28 | | Shape.Decimal -> mkNumericalPickler () :> _ 29 | | Shape.Double -> DoublePickler() :> _ 30 | | Shape.Char -> CharPickler() :> _ 31 | | 
Shape.String -> StringPickler() :> _ 32 | | Shape.Guid -> GuidPickler() :> _ 33 | | Shape.ByteArray -> ByteArrayPickler() :> _ 34 | | Shape.TimeSpan -> TimeSpanPickler() :> _ 35 | | Shape.DateTime -> UnSupportedType.Raise(t, "please use DateTimeOffset instead.") 36 | | Shape.DateTimeOffset -> DateTimeOffsetPickler() :> _ 37 | | :? TypeShape -> MemoryStreamPickler() :> _ 38 | | Shape.Enum s -> 39 | s.Accept 40 | { new IEnumVisitor with 41 | member _.Visit<'Enum, 'Underlying 42 | when 'Enum: enum<'Underlying> and 'Enum: struct and 'Enum :> ValueType and 'Enum: (new: unit -> 'Enum)> 43 | () 44 | = 45 | new EnumerationPickler<'Enum, 'Underlying>() :> _ } 46 | 47 | | Shape.Nullable s -> 48 | s.Accept 49 | { new INullableVisitor with 50 | member _.Visit<'T when 'T: (new: unit -> 'T) and 'T :> ValueType and 'T: struct>() = 51 | new NullablePickler<'T>(resolver.Resolve()) :> _ } 52 | 53 | | Shape.FSharpOption s -> 54 | s.Element.Accept 55 | { new ITypeVisitor with 56 | member _.Visit<'T>() = 57 | let tp = resolver.Resolve<'T>() 58 | new OptionPickler<'T>(tp) :> _ } 59 | 60 | | Shape.Array s when s.Rank = 1 -> 61 | s.Element.Accept 62 | { new ITypeVisitor with 63 | member _.Visit<'T>() = 64 | let tp = resolver.Resolve<'T>() 65 | new ListPickler<'T[], 'T>(Seq.toArray, null, tp) :> _ } 66 | 67 | | Shape.FSharpList s -> 68 | s.Element.Accept 69 | { new ITypeVisitor with 70 | member _.Visit<'T>() = 71 | let tp = resolver.Resolve<'T>() 72 | new ListPickler<'T list, 'T>(List.ofSeq, [], tp) :> _ } 73 | 74 | | Shape.ResizeArray s -> 75 | s.Element.Accept 76 | { new ITypeVisitor with 77 | member _.Visit<'T>() = 78 | let tp = resolver.Resolve<'T>() 79 | new ListPickler, 'T>(rlist, null, tp) :> _ } 80 | 81 | | Shape.FSharpSet s -> 82 | s.Accept 83 | { new IFSharpSetVisitor with 84 | member _.Visit<'T when 'T: comparison>() = mkSetPickler<'T> (resolver.Resolve()) :> _ } 85 | 86 | | Shape.FSharpMap s -> 87 | s.Accept 88 | { new IFSharpMapVisitor with 89 | member _.Visit<'K, 'V when 'K: comparison>() = 90 | if typeof<'K> <> typeof then 91 | UnSupportedType.Raise(t, "Map types must have key of type string.") 92 | 93 | new MapPickler<'V>(resolver.Resolve()) :> _ } 94 | 95 | | Shape.Tuple _ as s -> 96 | s.Accept 97 | { new ITypeVisitor with 98 | member _.Visit<'T>() = mkTuplePickler<'T> resolver :> _ } 99 | 100 | | Shape.FSharpRecord _ as s -> 101 | s.Accept 102 | { new ITypeVisitor with 103 | member _.Visit<'T>() = mkFSharpRecordPickler<'T> resolver :> _ } 104 | 105 | | Shape.FSharpUnion _ as s -> 106 | s.Accept 107 | { new ITypeVisitor with 108 | member _.Visit<'T>() = new UnionPickler<'T>(resolver) :> _ } 109 | 110 | | _ -> UnSupportedType.Raise t 111 | 112 | type CachedResolver private () as self = 113 | static let globalCache = ConcurrentDictionary>() 114 | let stack = Stack() 115 | let resolve t = 116 | try 117 | if stack.Contains t then 118 | UnSupportedType.Raise(t, "recursive types not supported.") 119 | 120 | stack.Push t 121 | let pf = globalCache.GetOrAdd(t, (fun t -> lazy (resolvePickler self t))) 122 | let _ = stack.Pop() 123 | pf.Value 124 | 125 | with UnsupportedShape t -> 126 | UnSupportedType.Raise t 127 | 128 | interface IPicklerResolver with 129 | member _.Resolve(t: Type) = resolve t 130 | member _.Resolve<'T>() = resolve typeof<'T> :?> Pickler<'T> 131 | 132 | static member Resolve(t: Type) = 133 | let ok, found = globalCache.TryGetValue t 134 | if ok then 135 | found.Value 136 | else 137 | (CachedResolver() :> IPicklerResolver).Resolve t 138 | 139 | [] 140 | module internal Pickler = 141 | 142 
| /// Resolves pickler for given type 143 | let resolveUntyped (t: Type) = CachedResolver.Resolve t 144 | /// Resolves pickler for given type 145 | let resolve<'T> () = CachedResolver.Resolve typeof<'T> :?> Pickler<'T> 146 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/PrimitivePicklers.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.PrimitivePicklers 3 | 4 | open System 5 | open System.Globalization 6 | open System.IO 7 | open System.Reflection 8 | 9 | open TypeShape 10 | open Amazon.DynamoDBv2.Model 11 | 12 | // 13 | // Pickler implementations for primitive types 14 | // 15 | 16 | type BoolPickler() = 17 | inherit StringRepresentablePickler() 18 | override _.PickleType = PickleType.Bool 19 | override _.PicklerType = PicklerType.Value 20 | override _.IsComparable = true 21 | 22 | override _.DefaultValue = false 23 | override _.Pickle b = AttributeValue(BOOL = b) |> Some 24 | override _.UnPickle a = if a.IsBOOLSet then a.BOOL else invalidCast a 25 | 26 | override _.Parse s = Boolean.Parse s 27 | override _.UnParse s = string s 28 | 29 | type StringPickler() = 30 | inherit StringRepresentablePickler() 31 | override _.PickleType = PickleType.String 32 | override _.PicklerType = PicklerType.Value 33 | override _.IsComparable = true 34 | 35 | override _.DefaultValue = null 36 | override _.Pickle s = 37 | if isNull s then 38 | AttributeValue(NULL = true) 39 | else 40 | AttributeValue(s) 41 | |> Some 42 | 43 | override _.UnPickle a = 44 | if a.NULL then null 45 | elif not <| isNull a.S then a.S 46 | else invalidCast a 47 | 48 | override _.Parse s = s 49 | override _.UnParse s = s 50 | 51 | type CharPickler() = 52 | inherit StringRepresentablePickler() 53 | override _.PickleType = PickleType.String 54 | override _.PicklerType = PicklerType.Value 55 | override _.IsComparable = true 56 | 57 | override _.DefaultValue = char 0 58 | override _.Pickle c = AttributeValue(string c) |> Some 59 | override _.UnPickle a = if not <| isNull a.S then Char.Parse(a.S) else invalidCast a 60 | 61 | override _.Parse s = Char.Parse s 62 | override _.UnParse c = string c 63 | 64 | let inline mkNumericalPickler< ^N 65 | when ^N: (static member Parse: string * IFormatProvider -> ^N) and ^N: (member ToString: IFormatProvider -> string)> 66 | () 67 | = 68 | let inline parseNum s = (^N: (static member Parse: string * IFormatProvider -> ^N) (s, CultureInfo.InvariantCulture)) 69 | 70 | let inline toString n = (^N: (member ToString: IFormatProvider -> string) (n, CultureInfo.InvariantCulture)) 71 | 72 | { new NumRepresentablePickler< ^N >() with 73 | member _.PickleType = PickleType.Number 74 | member _.PicklerType = PicklerType.Value 75 | member _.IsComparable = true 76 | 77 | member _.Parse s = parseNum s 78 | member _.UnParse e = toString e 79 | 80 | member _.DefaultValue = Unchecked.defaultof< ^N> 81 | member _.Pickle num = AttributeValue(N = toString num) |> Some 82 | member _.UnPickle a = if not <| isNull a.N then parseNum a.N else invalidCast a 83 | 84 | member x.PickleCoerced o = 85 | let n = 86 | match o with 87 | | :? 
^N as n -> n 88 | | other -> string other |> parseNum 89 | x.Pickle n } 90 | 91 | type DoublePickler() = 92 | inherit NumRepresentablePickler() 93 | let parse s = Double.Parse(s, CultureInfo.InvariantCulture) 94 | let unparse (d: double) = d.ToString("G17", CultureInfo.InvariantCulture) 95 | 96 | override _.PickleType = PickleType.Number 97 | override _.PicklerType = PicklerType.Value 98 | override _.IsComparable = true 99 | 100 | override _.Parse s = parse s 101 | override _.UnParse e = unparse e 102 | 103 | override _.DefaultValue = Unchecked.defaultof 104 | override _.Pickle num = AttributeValue(N = unparse num) |> Some 105 | override _.UnPickle a = if not <| isNull a.N then parse a.N else invalidCast a 106 | 107 | override x.PickleCoerced o = 108 | let n = 109 | match o with 110 | | :? double as n -> n 111 | | other -> string other |> parse 112 | x.Pickle n 113 | 114 | type ByteArrayPickler() = 115 | inherit StringRepresentablePickler() 116 | override _.PickleType = PickleType.Bytes 117 | override _.PicklerType = PicklerType.Value 118 | override _.IsComparable = true 119 | 120 | override _.Parse s = Convert.FromBase64String s 121 | override _.UnParse b = Convert.ToBase64String b 122 | 123 | override _.DefaultValue = [||] 124 | override _.Pickle bs = 125 | if isNull bs then Some <| AttributeValue(NULL = true) 126 | elif bs.Length = 0 then None 127 | else Some <| AttributeValue(B = new MemoryStream(bs)) 128 | 129 | override _.UnPickle a = 130 | if a.NULL then null 131 | elif not <| isNull a.B then a.B.ToArray() 132 | else invalidCast a 133 | 134 | 135 | type MemoryStreamPickler() = 136 | inherit Pickler() 137 | override _.PickleType = PickleType.Bytes 138 | override _.PicklerType = PicklerType.Value 139 | 140 | override _.DefaultValue = null 141 | override _.Pickle m = 142 | if isNull m then Some <| AttributeValue(NULL = true) 143 | elif m.Length = 0L then None 144 | else Some <| AttributeValue(B = m) 145 | 146 | override _.UnPickle a = 147 | if a.NULL then null 148 | elif notNull a.B then a.B 149 | else invalidCast a 150 | 151 | type GuidPickler() = 152 | inherit StringRepresentablePickler() 153 | override _.PickleType = PickleType.String 154 | override _.PicklerType = PicklerType.Value 155 | override _.IsComparable = true 156 | 157 | override _.DefaultValue = Guid.Empty 158 | override _.Pickle g = AttributeValue(string g) |> Some 159 | override _.UnPickle a = if not <| isNull a.S then Guid.Parse a.S else invalidCast a 160 | 161 | override _.Parse s = Guid.Parse s 162 | override _.UnParse g = string g 163 | 164 | type DateTimeOffsetPickler() = 165 | inherit StringRepresentablePickler() 166 | static let isoFormat = "yyyy-MM-dd\THH:mm:ss.fffffffzzz" 167 | static let parse s = DateTimeOffset.Parse(s) 168 | static let unparse (d: DateTimeOffset) = d.ToString(isoFormat) 169 | 170 | override _.PickleType = PickleType.String 171 | override _.PicklerType = PicklerType.Value 172 | override _.IsComparable = true 173 | 174 | override _.DefaultValue = DateTimeOffset() 175 | override _.Parse s = parse s 176 | override _.UnParse d = unparse d 177 | 178 | override _.Pickle d = AttributeValue(unparse d) |> Some 179 | override _.UnPickle a = if not <| isNull a.S then parse a.S else invalidCast a 180 | 181 | 182 | type TimeSpanPickler() = 183 | inherit NumRepresentablePickler() 184 | override _.PickleType = PickleType.String 185 | override _.PicklerType = PicklerType.Value 186 | override _.IsComparable = true 187 | 188 | override _.Parse s = TimeSpan.FromTicks(int64 s) 189 | override _.UnParse t = string 
t.Ticks 190 | override _.DefaultValue = TimeSpan.Zero 191 | override _.Pickle t = AttributeValue(N = string t.Ticks) |> Some 192 | override _.UnPickle a = 193 | if not <| isNull a.N then 194 | TimeSpan.FromTicks(int64 a.N) 195 | else 196 | invalidCast a 197 | 198 | 199 | type EnumerationPickler<'E, 'U when 'E: enum<'U> and 'E: struct and 'E :> ValueType and 'E: (new: unit -> 'E)>() = 200 | inherit StringRepresentablePickler<'E>() 201 | override _.PickleType = PickleType.String 202 | override _.PicklerType = PicklerType.Enum 203 | 204 | override _.DefaultValue = Unchecked.defaultof<'E> 205 | override _.Pickle e = AttributeValue(S = e.ToString()) |> Some 206 | override _.UnPickle a = 207 | if notNull a.S then 208 | Enum.Parse(typeof<'E>, a.S) :?> 'E 209 | else 210 | invalidCast a 211 | 212 | override _.Parse s = Enum.Parse(typeof<'E>, s) :?> 'E 213 | override _.UnParse e = e.ToString() 214 | 215 | type NullablePickler<'T when 'T: (new: unit -> 'T) and 'T :> ValueType and 'T: struct>(tp: Pickler<'T>) = 216 | inherit Pickler>() 217 | override _.PickleType = tp.PickleType 218 | override _.PicklerType = PicklerType.Wrapper 219 | override _.IsComparable = tp.IsComparable 220 | override _.DefaultValue = Nullable<'T>() 221 | override _.Pickle n = 222 | if n.HasValue then 223 | tp.Pickle n.Value 224 | else 225 | AttributeValue(NULL = true) |> Some 226 | override _.UnPickle a = 227 | if a.NULL then 228 | Nullable<'T>() 229 | else 230 | new Nullable<'T>(tp.UnPickle a) 231 | 232 | type OptionPickler<'T>(tp: Pickler<'T>) = 233 | inherit Pickler<'T option>() 234 | override _.PickleType = tp.PickleType 235 | override _.PicklerType = PicklerType.Wrapper 236 | override _.IsComparable = tp.IsComparable 237 | override _.DefaultValue = None 238 | override _.Pickle topt = 239 | match topt with 240 | | None -> None 241 | | Some t -> tp.Pickle t 242 | override _.UnPickle a = if a.NULL then None else Some(tp.UnPickle a) 243 | override x.PickleCoerced obj = 244 | match obj with 245 | | :? 'T as t -> tp.Pickle t 246 | | :? ('T option) as topt -> x.Pickle topt 247 | | _ -> raise <| InvalidCastException() 248 | 249 | type StringRepresentationPickler<'T>(ep: StringRepresentablePickler<'T>) = 250 | inherit Pickler<'T>() 251 | override _.PickleType = PickleType.String 252 | override _.PicklerType = ep.PicklerType 253 | override _.DefaultValue = ep.DefaultValue 254 | override _.Pickle t = AttributeValue(S = ep.UnParse t) |> Some 255 | override _.UnPickle a = if notNull a.S then ep.Parse a.S else invalidCast a 256 | 257 | let mkStringRepresentationPickler (resolver: IPicklerResolver) (prop: PropertyInfo) = 258 | TypeShape.Create(prop.PropertyType).Accept 259 | { new ITypeVisitor with 260 | member _.Visit<'T>() = 261 | match resolver.Resolve<'T>() with 262 | | :? StringRepresentablePickler<'T> as tp -> 263 | if tp.PickleType = PickleType.String then 264 | tp :> Pickler 265 | else 266 | new StringRepresentationPickler<'T>(tp) :> Pickler 267 | | _ -> invalidArg prop.Name "property type cannot be represented as string." 
} 268 | 269 | type SerializerAttributePickler<'T>(serializer: IPropertySerializer, resolver: IPicklerResolver) = 270 | inherit Pickler<'T>() 271 | 272 | let picklePickler = resolver.Resolve serializer.PickleType 273 | 274 | override _.PickleType = picklePickler.PickleType 275 | override _.PicklerType = PicklerType.Serialized 276 | override _.DefaultValue = raise <| NotSupportedException("Default values not supported in serialized types.") 277 | 278 | override _.Pickle value = 279 | let pickle = serializer.Serialize value 280 | picklePickler.PickleUntyped pickle 281 | 282 | override _.UnPickle a = 283 | let pickle = picklePickler.UnPickleUntyped a 284 | serializer.Deserialize pickle 285 | 286 | let mkSerializerAttributePickler (resolver: IPicklerResolver) (serializer: IPropertySerializer) (t: Type) = 287 | TypeShape.Create(t).Accept 288 | { new ITypeVisitor with 289 | member _.Visit<'T>() = new SerializerAttributePickler<'T>(serializer, resolver) :> _ } 290 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/PropertyMetadata.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.PropertyMetadata 3 | 4 | open System 5 | open System.Reflection 6 | 7 | // 8 | // Pickler metadata for F# type properties 9 | // 10 | 11 | [] 12 | type PropertyMetadata = 13 | { Name: string 14 | Index: int 15 | PropertyInfo: PropertyInfo 16 | Pickler: Pickler 17 | NoDefaultValue: bool 18 | Attributes: Attribute[] } 19 | 20 | member rp.TryGetAttribute<'Attribute when 'Attribute :> Attribute>() = tryGetAttribute<'Attribute> rp.Attributes 21 | member rp.GetAttributes<'Attribute when 'Attribute :> Attribute>() = getAttributes<'Attribute> rp.Attributes 22 | member rp.ContainsAttribute<'Attribute when 'Attribute :> Attribute>() = containsAttribute<'Attribute> rp.Attributes 23 | 24 | override r.Equals o = 25 | match o with 26 | | :? PropertyMetadata as r' -> r.PropertyInfo = r'.PropertyInfo 27 | | _ -> false 28 | 29 | override r.GetHashCode() = hash r.PropertyInfo 30 | 31 | static member FromPropertyInfo (resolver: IPicklerResolver) (attrId: int) (prop: PropertyInfo) = 32 | let attributes = prop.GetAttributes() 33 | let pickler = 34 | match 35 | attributes 36 | |> Seq.tryPick (fun a -> 37 | match box a with 38 | | :? IPropertySerializer as ps -> Some ps 39 | | _ -> None) 40 | with 41 | | Some serializer -> mkSerializerAttributePickler resolver serializer prop.PropertyType 42 | | None when attributes |> containsAttribute -> mkStringRepresentationPickler resolver prop 43 | | None -> resolver.Resolve prop.PropertyType 44 | 45 | let name = 46 | match attributes |> tryGetAttribute with 47 | | Some cn -> cn.Name 48 | | None -> prop.Name 49 | 50 | if not <| isValidFieldName name then 51 | invalidArg name "invalid record field name; must be 1 to 64k long (as utf8)." 
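// Pickler selection above works in priority order: a property-level IPropertySerializer attribute
// wins, a StringRepresentation attribute routes the property through mkStringRepresentationPickler,
// and otherwise the resolver supplies the default pickler for the property type. The DynamoDB
// attribute name is taken from a CustomName attribute when present, falling back to the CLR
// property name, and must pass the field-name length check above.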
52 | 53 | { Name = name 54 | Index = attrId 55 | PropertyInfo = prop 56 | Pickler = pickler 57 | NoDefaultValue = containsAttribute attributes 58 | Attributes = attributes } 59 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/RecordPickler.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.RecordPickler 3 | 4 | open System.Collections.Generic 5 | 6 | open Microsoft.FSharp.Reflection 7 | 8 | open Amazon.DynamoDBv2.Model 9 | 10 | open FSharp.AWS.DynamoDB 11 | 12 | // 13 | // Pickler implementation for F# record types 14 | // 15 | 16 | type IRecordPickler = 17 | abstract Properties: PropertyMetadata[] 18 | 19 | type RecordPickler<'T>(ctor: obj[] -> obj, properties: PropertyMetadata[]) = 20 | inherit Pickler<'T>() 21 | 22 | member __.Properties = properties 23 | member __.OfRecord(value: 'T) : RestObject = 24 | let values = new RestObject() 25 | for prop in properties do 26 | let field = prop.PropertyInfo.GetValue value 27 | match prop.Pickler.PickleUntyped field with 28 | | None -> () 29 | | Some av -> values.Add(prop.Name, av) 30 | 31 | values 32 | 33 | member __.ToRecord(ro: RestObject) : 'T = 34 | let values = Array.zeroCreate properties.Length 35 | for i = 0 to properties.Length - 1 do 36 | let prop = properties.[i] 37 | let notFound () = raise <| new KeyNotFoundException(sprintf "attribute %A not found." prop.Name) 38 | let ok, av = ro.TryGetValue prop.Name 39 | if ok then values.[i] <- prop.Pickler.UnPickleUntyped av 40 | elif prop.NoDefaultValue then notFound () 41 | else values.[i] <- prop.Pickler.DefaultValueUntyped 42 | 43 | ctor values :?> 'T 44 | 45 | interface IRecordPickler with 46 | member __.Properties = properties 47 | 48 | override __.PicklerType = PicklerType.Record 49 | override __.PickleType = PickleType.Map 50 | override __.DefaultValue = 51 | let defaultFields = properties |> Array.map (fun p -> p.Pickler.DefaultValueUntyped) 52 | ctor defaultFields :?> 'T 53 | 54 | override __.Pickle(record: 'T) = 55 | let ro = __.OfRecord record 56 | if ro.Count = 0 then 57 | None 58 | else 59 | Some <| AttributeValue(M = ro) 60 | 61 | override __.UnPickle a = if a.IsMSet then __.ToRecord a.M else invalidCast a 62 | 63 | type PropertyMetadata with 64 | 65 | member rp.NestedRecord = 66 | match box rp.Pickler with 67 | | :? 
IRecordPickler as rp -> Some rp.Properties 68 | | _ -> None 69 | 70 | 71 | let mkTuplePickler<'T> (resolver: IPicklerResolver) = 72 | let ctor = FSharpValue.PreComputeTupleConstructor typeof<'T> 73 | let properties = typeof<'T>.GetProperties() |> Array.mapi (PropertyMetadata.FromPropertyInfo resolver) 74 | new RecordPickler<'T>(ctor, properties) 75 | 76 | let mkFSharpRecordPickler<'T> (resolver: IPicklerResolver) = 77 | let ctor = FSharpValue.PreComputeRecordConstructor(typeof<'T>, true) 78 | let properties = 79 | FSharpType.GetRecordFields(typeof<'T>, true) 80 | |> Array.mapi (PropertyMetadata.FromPropertyInfo resolver) 81 | new RecordPickler<'T>(ctor, properties) 82 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Picklers/UnionPickler.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.UnionPickler 3 | 4 | open System.Collections.Generic 5 | open System.IO 6 | open System.Reflection 7 | 8 | open Microsoft.FSharp.Reflection 9 | 10 | open Amazon.DynamoDBv2.Model 11 | 12 | open FSharp.AWS.DynamoDB 13 | 14 | // 15 | // Pickler implementation for F# Union types 16 | // 17 | 18 | type private UnionCaseData = { UCI: UnionCaseInfo; CaseCtor: MethodInfo; Properties: PropertyMetadata[] } 19 | 20 | type UnionPickler<'U>(resolver: IPicklerResolver) = 21 | inherit Pickler<'U>() 22 | 23 | let caseAttr = "Union_Case" 24 | let ucis = FSharpType.GetUnionCases(typeof<'U>, true) 25 | let tagReader = FSharpValue.PreComputeUnionTagReader(typeof<'U>, true) 26 | let mkUCD uci = 27 | let ctor = FSharpValue.PreComputeUnionConstructorInfo(uci, true) 28 | let props = uci.GetFields() |> Array.mapi (PropertyMetadata.FromPropertyInfo resolver) 29 | { UCI = uci; CaseCtor = ctor; Properties = props } 30 | 31 | let cases = ucis |> Array.map mkUCD 32 | 33 | member __.OfUnion(union: 'U) : RestObject = 34 | let values = new RestObject() 35 | let tag = tagReader union 36 | let case = cases.[tag] 37 | values.Add(caseAttr, AttributeValue(case.UCI.Name)) 38 | for prop in case.Properties do 39 | let field = prop.PropertyInfo.GetValue union 40 | match prop.Pickler.PickleUntyped field with 41 | | None -> () 42 | | Some av -> values.Add(prop.Name, av) 43 | 44 | values 45 | 46 | member __.ToUnion(ro: RestObject) : 'U = 47 | let notFound name = raise <| new KeyNotFoundException(sprintf "attribute %A not found." name) 48 | let tag = 49 | let ok, av = ro.TryGetValue caseAttr 50 | if ok then 51 | match av.S with 52 | | null -> invalidCast av 53 | | tag -> tag 54 | else 55 | notFound caseAttr 56 | 57 | match cases |> Array.tryFind (fun c -> c.UCI.Name = tag) with 58 | | None -> 59 | let msg = sprintf "union case name %A does not correspond to type '%O'." tag typeof<'U> 60 | raise <| new InvalidDataException(msg) 61 | 62 | | Some case -> 63 | let values = Array.zeroCreate case.Properties.Length 64 | for i = 0 to values.Length - 1 do 65 | let prop = case.Properties.[i] 66 | let ok, av = ro.TryGetValue prop.Name 67 | if ok then values.[i] <- prop.Pickler.UnPickleUntyped av 68 | elif prop.NoDefaultValue then notFound prop.Name 69 | else values.[i] <- prop.Pickler.DefaultValueUntyped 70 | 71 | case.CaseCtor.Invoke(null, values) :?> 'U 72 | 73 | override __.PicklerType = PicklerType.Union 74 | override __.PickleType = PickleType.Map 75 | override __.DefaultValue = invalidOp <| sprintf "default values not supported for unions." 
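// Union values are pickled as a DynamoDB map (PickleType.Map): OfUnion writes the case name under
// the "Union_Case" attribute plus one attribute per case field, and ToUnion reads "Union_Case"
// back to locate the matching case before rebuilding its fields, substituting each field pickler's
// default value when an attribute is absent unless the field is marked NoDefaultValue.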
76 | 77 | override __.Pickle(union: 'U) = 78 | let ro = __.OfUnion union 79 | Some <| AttributeValue(M = ro) 80 | 81 | override __.UnPickle a = if a.IsMSet then __.ToUnion a.M else invalidCast a 82 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Script.fsx: -------------------------------------------------------------------------------- 1 | #if USE_PUBLISHED_NUGET // If you don't want to do a local build first 2 | #r "nuget: FSharp.AWS.DynamoDB, *-*" // *-* to white-list the fact that all releases to date have been `-beta` sufficed 3 | #else 4 | #I "../../tests/FSharp.AWS.DynamoDB.Tests/bin/Debug/net8.0/" 5 | #r "AWSSDK.Core.dll" 6 | #r "AWSSDK.DynamoDBv2.dll" 7 | #r "FSharp.AWS.DynamoDB.dll" 8 | #endif 9 | 10 | open System 11 | 12 | open Amazon.DynamoDBv2 13 | 14 | open FSharp.AWS.DynamoDB 15 | open FSharp.AWS.DynamoDB.Scripting // non-Async overloads 16 | 17 | #if USE_CLOUD 18 | open Amazon.DynamoDBv2 19 | let ok, creds = 20 | Amazon.Runtime.CredentialManagement 21 | .CredentialProfileStoreChain() 22 | .TryGetAWSCredentials("default") 23 | let ddb = 24 | if ok then 25 | new AmazonDynamoDBClient(creds) :> IAmazonDynamoDB 26 | else 27 | failwith "Unable to load default credentials" 28 | #else // Use Docker-hosted dynamodb-local instance 29 | // See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.DownloadingAndRunning.html#docker for details of how to deploy a simulator instance 30 | #if USE_CREDS_FROM_ENV_VARS // 'AWS_ACCESS_KEY_ID' and 'AWS_SECRET_ACCESS_KEY' must be set for this to work 31 | let credentials = AWSCredentials.FromEnvironmentVariables() 32 | #else 33 | // Credentials are not validated if connecting to local instance so anything will do (this avoids it looking for profiles to be configured) 34 | let credentials = Amazon.Runtime.BasicAWSCredentials("A", "A") 35 | #endif 36 | let clientConfig = AmazonDynamoDBConfig(ServiceURL = "http://localhost:8000") 37 | let ddb = new AmazonDynamoDBClient(credentials, clientConfig) :> IAmazonDynamoDB 38 | #endif 39 | 40 | type Nested = { A: string; B: System.Reflection.BindingFlags } 41 | 42 | type Union = 43 | | A of int 44 | | B of string * int 45 | 46 | type Test = 47 | { [] 48 | HashKey: Guid 49 | [] 50 | RangeKey: string 51 | [] 52 | Value: float 53 | List: int64 list 54 | Unions: Union list 55 | String: string ref 56 | Value2: int option 57 | Values: Nested[] 58 | Date: DateTimeOffset 59 | Map: Map 60 | Set: Set list 61 | Bytes: byte[] } 62 | 63 | let throughput = ProvisionedThroughput(readCapacityUnits = 10L, writeCapacityUnits = 10L) 64 | let table = TableContext.Initialize(ddb, "test", Throughput.Provisioned throughput) 65 | 66 | let value = 67 | { HashKey = Guid.NewGuid() 68 | List = [] 69 | RangeKey = "2" 70 | Value = 3.1415926 71 | Date = DateTimeOffset.Now + TimeSpan.FromDays 2. 72 | Value2 = None 73 | Values = [| { A = "foo"; B = System.Reflection.BindingFlags.Instance } |] 74 | Map = Map.ofList [ ("A1", 1) ] 75 | Set = [ set [ 1L ]; set [ 2L ] ] 76 | Bytes = [| 1uy .. 
10uy |] 77 | String = ref "1a" 78 | Unions = [ A 42; B("42", 3) ] } 79 | 80 | let key = table.PutItem value 81 | table.GetItem key 82 | 83 | table.PrimaryKey 84 | table.LocalSecondaryIndices 85 | 86 | table.Query <@ fun r -> r.HashKey = value.HashKey && r.Value >= value.Value @> 87 | 88 | let query = table.Template.PrecomputeConditionalExpr <@ fun r -> r.HashKey = value.HashKey && r.Value >= value.Value @> 89 | 90 | query.IndexName 91 | 92 | #time "on" 93 | 94 | // Real: 00:00:07.996, CPU: 00:00:07.937, GC gen0: 213, gen1: 1, gen2: 0 95 | for i = 1 to 1000 do 96 | let _ = table.Template.PrecomputeUpdateExpr <@ fun r -> { r with Value2 = Some 42 } @> 97 | () 98 | 99 | // Real: 00:01:57.405, CPU: 00:00:19.750, GC gen0: 241, gen1: 13, gen2: 1 100 | for i = 1 to 1000 do 101 | let _ = table.UpdateItem(key, <@ fun r -> { r with Value2 = Some 42 } @>) 102 | () 103 | 104 | // Real: 00:01:35.912, CPU: 00:00:01.921, GC gen0: 27, gen1: 3, gen2: 1 105 | let uexpr = table.Template.PrecomputeUpdateExpr <@ fun r -> { r with Value2 = Some 42 } @> 106 | for i = 1 to 1000 do 107 | let _ = table.UpdateItem(key, uexpr) 108 | () 109 | 110 | // Real: 00:01:35.107, CPU: 00:00:02.078, GC gen0: 26, gen1: 2, gen2: 0 111 | let uexpr2 = table.Template.PrecomputeUpdateExpr <@ fun v r -> { r with Value2 = v } @> 112 | for i = 1 to 1000 do 113 | let _ = table.UpdateItem(key, uexpr2 (Some 42)) 114 | () 115 | 116 | (* Expanded version of README sample that illustrates how one can better split Table initialization from application logic *) 117 | 118 | type internal CounterEntry = 119 | { [] 120 | Id: Guid 121 | Value: int64 } 122 | 123 | /// Represents a single Item in a Counters Table 124 | type Counter internal (table: TableContext, key: TableKey) = 125 | 126 | static member internal Start(table: TableContext) = async { 127 | let initialEntry = { Id = Guid.NewGuid(); Value = 0L } 128 | let! key = table.PutItemAsync(initialEntry) 129 | return Counter(table, key) 130 | } 131 | 132 | member _.Value = async { 133 | let! current = table.GetItemAsync(key) 134 | return current.Value 135 | } 136 | 137 | member _.Incr() = async { 138 | let! updated = table.UpdateItemAsync(key, <@ fun (e: CounterEntry) -> { e with Value = e.Value + 1L } @>) 139 | return updated.Value 140 | } 141 | 142 | /// Wrapper that creates/verifies the table only once per call to Create() 143 | /// This does assume that your application will be sufficiently privileged to create tables on the fly 144 | type EasyCounters private (table: TableContext) = 145 | 146 | // We only want to do the initialization bit once per instance of our application 147 | static member Create(client: IAmazonDynamoDB, tableName: string) : Async = async { 148 | let table = TableContext(client, tableName) 149 | // Create the table if necessary. Verifies schema is correct if it has already been created 150 | // NOTE the hard coded initial throughput provisioning - arguably this belongs outside of your application logic 151 | let throughput = ProvisionedThroughput(readCapacityUnits = 10L, writeCapacityUnits = 10L) 152 | let! 
_desc = table.VerifyOrCreateTableAsync(Throughput.Provisioned throughput) 153 | return EasyCounters(table) 154 | } 155 | 156 | member _.StartCounter() : Async = Counter.Start table 157 | 158 | /// Variant of EasyCounters that splits the provisioning step from the (optional) validation that the table is present 159 | type SimpleCounters private (table: TableContext) = 160 | 161 | static member Provision(client: IAmazonDynamoDB, tableName: string, readCapacityUnits, writeCapacityUnits) = async { 162 | let table = TableContext(client, tableName) 163 | let provisionedThroughput = ProvisionedThroughput(readCapacityUnits, writeCapacityUnits) 164 | let throughput = Throughput.Provisioned provisionedThroughput 165 | // normally, RCU/WCU provisioning only happens first time the Table is created and is then considered an external concern 166 | // here we use `UpdateTableIfRequiredAsync` to reset it each time we deploy the app 167 | let! desc = table.VerifyOrCreateTableAsync(throughput) 168 | return! table.UpdateTableIfRequiredAsync(throughput, currentTableDescription = desc) 169 | } 170 | 171 | static member ProvisionOnDemand(client: IAmazonDynamoDB, tableName: string) = async { 172 | let table = TableContext(client, tableName) 173 | let throughput = Throughput.OnDemand 174 | let! desc = table.VerifyOrCreateTableAsync(throughput) 175 | // as per the Provision, above, we reset to OnDemand, if it got reconfigured since it was originally created 176 | return! table.UpdateTableIfRequiredAsync(throughput, currentTableDescription = desc) 177 | } 178 | 179 | /// We only want to do the initialization bit once per instance of our application 180 | /// Similar to EasyCounters.Create in that it ensures the table is provisioned correctly 181 | /// However it will never actually create the table 182 | static member CreateWithVerify(client: IAmazonDynamoDB, tableName: string) : Async = async { 183 | let table = TableContext(client, tableName) 184 | // This validates the Table has been created correctly 185 | // (in general this is a good idea, but it is an optional step so it can be skipped, i.e. see Create() below) 186 | do! 
table.VerifyTableAsync() 187 | return SimpleCounters(table) 188 | } 189 | 190 | /// Assumes the table has been provisioned externally via Provision() 191 | static member Create(client: IAmazonDynamoDB, tableName: string) : SimpleCounters = 192 | // NOTE we are skipping 193 | SimpleCounters(TableContext(client, tableName)) 194 | 195 | member _.StartCounter() : Async = Counter.Start table 196 | 197 | let e = EasyCounters.Create(ddb, "testing") |> Async.RunSynchronously 198 | let e1 = e.StartCounter() |> Async.RunSynchronously 199 | let e2 = e.StartCounter() |> Async.RunSynchronously 200 | e1.Incr() |> Async.RunSynchronously 201 | e2.Incr() |> Async.RunSynchronously 202 | 203 | // First, we create it in On-Demand mode 204 | SimpleCounters.ProvisionOnDemand(ddb, "testing-pre-provisioned") |> Async.RunSynchronously 205 | // Then we flip it to Provisioned mode 206 | SimpleCounters.Provision(ddb, "testing-pre-provisioned", readCapacityUnits = 10L, writeCapacityUnits = 10L) 207 | |> Async.RunSynchronously 208 | // The consuming code can assume the provisioning has been carried out as part of the deploy 209 | // that allows the creation to be synchronous (and not impede application startup) 210 | let s = SimpleCounters.Create(ddb, "testing-pre-provisioned") 211 | let s1 = s.StartCounter() |> Async.RunSynchronously // Throws if Provision step has not been executed 212 | s1.Incr() |> Async.RunSynchronously 213 | 214 | // Alternately, we can have the app do an extra call (and have some asynchronous initialization work) to check the table is ready 215 | let v = SimpleCounters.CreateWithVerify(ddb, "testing-not-present") |> Async.RunSynchronously // Throws, as table not present 216 | let v2 = v.StartCounter() |> Async.RunSynchronously 217 | v2.Incr() |> Async.RunSynchronously 218 | 219 | // (TOCONSIDER: Illustrate how to use AsyncCacheCell from https://github.com/jet/equinox/blob/master/src/Equinox.Core/AsyncCacheCell.fs to make Verify call lazy) 220 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Types.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB 2 | 3 | open System 4 | 5 | open Amazon.DynamoDBv2 6 | 7 | /// Declares that the carrying property contains the HashKey 8 | /// for the record instance. Property type must be of type 9 | /// string, number or byte array. 10 | [] 11 | type HashKeyAttribute() = 12 | inherit Attribute() 13 | 14 | /// Declares that the carrying property contains the RangeKey 15 | /// for the record instance. Property type must be of type 16 | /// string, number or byte array. 17 | [] 18 | type RangeKeyAttribute() = 19 | inherit Attribute() 20 | 21 | /// Declares that the carrying property should contain the HashKey 22 | /// for a global secondary index. 23 | [] 24 | type GlobalSecondaryHashKeyAttribute(indexName: string) = 25 | inherit Attribute() 26 | member _.IndexName = indexName 27 | 28 | /// Declares that the carrying property should contain the RangeKey 29 | /// for a global secondary index. 30 | [] 31 | type GlobalSecondaryRangeKeyAttribute(indexName: string) = 32 | inherit Attribute() 33 | member _.IndexName = indexName 34 | 35 | /// Declares the carrying property as local secondary index 36 | /// in the table schema. 
37 | [] 38 | type LocalSecondaryIndexAttribute private (indexName: string option) = 39 | inherit Attribute() 40 | new() = LocalSecondaryIndexAttribute(None) 41 | new(indexName: string) = LocalSecondaryIndexAttribute(Some indexName) 42 | member internal _.IndexName = indexName 43 | 44 | /// Declares a constant HashKey attribute for the given record. 45 | /// Records carrying this attribute should specify a RangeKey field. 46 | [] 47 | type ConstantHashKeyAttribute(name: string, hashkey: obj) = 48 | inherit Attribute() 49 | do 50 | if isNull name then 51 | raise <| ArgumentNullException("name") 52 | if isNull hashkey then 53 | raise <| ArgumentNullException("hashkey") 54 | 55 | member _.Name = name 56 | member _.HashKey = hashkey 57 | member _.HashKeyType = hashkey.GetType() 58 | 59 | /// Declares a constant RangeKey attribute for the given record. 60 | /// Records carrying this attribute should specify a HashKey field. 61 | [] 62 | type ConstantRangeKeyAttribute(name: string, rangeKey: obj) = 63 | inherit Attribute() 64 | do 65 | if isNull name then 66 | raise <| ArgumentNullException("name") 67 | if isNull rangeKey then 68 | raise <| ArgumentNullException("rangeKey") 69 | 70 | member _.Name = name 71 | member _.RangeKey = rangeKey 72 | member _.HashKeyType = rangeKey.GetType() 73 | 74 | /// Declares that annotated property should be represented 75 | /// as string in the DynamoDB table. Only applies to 76 | [] 77 | type StringRepresentationAttribute() = 78 | inherit Attribute() 79 | 80 | /// Specify a custom DynamoDB attribute name for the given record field. 81 | [] 82 | type CustomNameAttribute(name: string) = 83 | inherit Attribute() 84 | do 85 | if isNull name then 86 | raise <| ArgumentNullException("name") 87 | member _.Name = name 88 | 89 | /// Specifies that record deserialization should fail if not corresponding attribute 90 | /// was fetched from the table. 91 | [] 92 | type NoDefaultValueAttribute() = 93 | inherit Attribute() 94 | 95 | /// Declares that the given property should be serialized using the given 96 | /// Serialization/Deserialization methods before being uploaded to the table. 97 | type internal IPropertySerializer = 98 | abstract PickleType: Type 99 | abstract Serialize: value: 'T -> obj 100 | abstract Deserialize: pickle: obj -> 'T 101 | 102 | /// Declares that the given property should be serialized using the given 103 | /// Serialization/Deserialization methods before being uploaded to the table. 
104 | [] 105 | type PropertySerializerAttribute<'PickleType>() = 106 | inherit Attribute() 107 | /// Serializes a value to the given pickle type 108 | abstract Serialize: 'T -> 'PickleType 109 | /// Deserializes a value from the given pickle type 110 | abstract Deserialize: 'PickleType -> 'T 111 | 112 | interface IPropertySerializer with 113 | member _.PickleType = typeof<'PickleType> 114 | member x.Serialize value = x.Serialize value :> obj 115 | member x.Deserialize pickle = x.Deserialize(pickle :?> 'PickleType) 116 | 117 | /// Metadata on a table key attribute 118 | type KeyAttributeSchema = { AttributeName: string; KeyType: ScalarAttributeType } 119 | 120 | /// Identifies type of DynamoDB table key schema 121 | type KeySchemaType = 122 | | PrimaryKey 123 | | GlobalSecondaryIndex of indexName: string 124 | | LocalSecondaryIndex of indexName: string 125 | 126 | member kst.IndexName = 127 | match kst with 128 | | GlobalSecondaryIndex name 129 | | LocalSecondaryIndex name -> Some name 130 | | PrimaryKey -> None 131 | 132 | /// DynamoDB table key schema description 133 | type TableKeySchema = 134 | { HashKey: KeyAttributeSchema 135 | RangeKey: KeyAttributeSchema option 136 | Type: KeySchemaType } 137 | 138 | /// Table entry key identifier 139 | [] 140 | type TableKey private (hashKey: obj, rangeKey: obj) = 141 | member _.HashKey = hashKey 142 | member _.RangeKey = rangeKey 143 | member _.IsRangeKeySpecified = notNull rangeKey 144 | member _.IsHashKeySpecified = notNull hashKey 145 | member private _.Format = 146 | match rangeKey with 147 | | null -> sprintf "{ HashKey = %A }" hashKey 148 | | rk -> 149 | match hashKey with 150 | | null -> sprintf "{ RangeKey = %A }" rk 151 | | hk -> sprintf "{ HashKey = %A ; RangeKey = %A }" hk rk 152 | 153 | override __.ToString() = __.Format 154 | 155 | override tk.Equals o = 156 | match o with 157 | | :? TableKey as tk' -> hashKey = tk'.HashKey && rangeKey = tk'.RangeKey 158 | | _ -> false 159 | 160 | override tk.GetHashCode() = hash2 hashKey rangeKey 161 | 162 | /// Defines a table key using provided HashKey 163 | static member Hash<'HashKey>(hashKey: 'HashKey) = 164 | if isNull hashKey then 165 | raise <| ArgumentNullException("hashKey") 166 | TableKey(hashKey, null) 167 | 168 | /// Defines a table key using provided RangeKey 169 | static member Range<'RangeKey>(rangeKey: 'RangeKey) = 170 | if isNull rangeKey then 171 | raise <| ArgumentNullException("rangeKey") 172 | TableKey(null, rangeKey) 173 | 174 | /// Defines a table key using combined HashKey and RangeKey 175 | static member Combined<'HashKey, 'RangeKey>(hashKey: 'HashKey, rangeKey: 'RangeKey) = 176 | if isNull hashKey then 177 | raise <| ArgumentNullException("hashKey") 178 | TableKey(hashKey, rangeKey) 179 | 180 | /// Query (start/last evaluated) key identifier 181 | [] 182 | type IndexKey private (hashKey: obj, rangeKey: obj, primaryKey: TableKey) = 183 | member _.HashKey = hashKey 184 | member _.RangeKey = rangeKey 185 | member _.IsRangeKeySpecified = notNull rangeKey 186 | member _.PrimaryKey = primaryKey 187 | member private _.Format = 188 | match (hashKey, rangeKey) with 189 | | null, null -> sprintf "{ Primary = %A }" primaryKey 190 | | hk, null -> sprintf "{ HashKey = %A ; Primary = %A }" hk primaryKey 191 | | hk, rk -> sprintf "{ HashKey = %A ; RangeKey = %A ; Primary = %A }" hk rk primaryKey 192 | 193 | override x.ToString() = x.Format 194 | 195 | override _.Equals o = 196 | match o with 197 | | :? 
IndexKey as qk' -> hashKey = qk'.HashKey && rangeKey = qk'.RangeKey && primaryKey = qk'.PrimaryKey 198 | | _ -> false 199 | 200 | override _.GetHashCode() = hash3 hashKey rangeKey primaryKey 201 | 202 | /// Defines an index key using provided HashKey and primary TableKey 203 | static member Hash<'HashKey>(hashKey: 'HashKey, primaryKey: TableKey) = 204 | if isNull hashKey then 205 | raise <| ArgumentNullException("hashKey") 206 | IndexKey(hashKey, null, primaryKey) 207 | 208 | /// Defines an index key using combined HashKey, RangeKey and primary TableKey 209 | static member Combined<'HashKey, 'RangeKey>(hashKey: 'HashKey, rangeKey: 'RangeKey, primaryKey: TableKey) = 210 | if isNull hashKey then 211 | raise <| ArgumentNullException("hashKey") 212 | IndexKey(hashKey, rangeKey, primaryKey) 213 | 214 | // Defines an index key using just the primary TableKey 215 | static member Primary(primaryKey: TableKey) = IndexKey(null, null, primaryKey) 216 | 217 | /// Pagination result type 218 | type PaginatedResult<'TRecord, 'Key> = 219 | { Records: 'TRecord[] 220 | LastEvaluatedKey: 'Key option } 221 | 222 | interface System.Collections.IEnumerable with 223 | member x.GetEnumerator() = x.Records.GetEnumerator() 224 | interface System.Collections.Generic.IEnumerable<'TRecord> with 225 | member x.GetEnumerator() = (x.Records :> System.Collections.Generic.IEnumerable<'TRecord>).GetEnumerator() 226 | 227 | 228 | #nowarn "1182" 229 | 230 | /// Conditional expression special operators 231 | [] 232 | module ConditionalOperators = 233 | 234 | /// Decides whether parameter lies within given range 235 | let BETWEEN (x: 'T) (lower: 'T) (upper: 'T) : bool = lower <= x && x <= upper 236 | 237 | /// Checks whether a record attribute exists in DynamoDB 238 | let EXISTS (attr: 'T) : bool = invalidOp "EXISTS predicate reserved for quoted condition expressions." 239 | 240 | /// Checks whether a record attribute does not exist in DynamoDB 241 | let NOT_EXISTS (attr: 'T) : bool = invalidOp "NOT_EXISTS predicate reserved for quoted condition expressions." 242 | 243 | /// Update expression special operators 244 | [] 245 | module UpdateOperators = 246 | 247 | /// Table Update operation placeholder type 248 | type UpdateOp = 249 | /// Combines two update operations into one 250 | static member (&&&)(left: UpdateOp, right: UpdateOp) : UpdateOp = 251 | invalidOp "Update combiner reserved for quoted update expressions." 252 | 253 | /// Assigns a record attribute path to given value 254 | let SET (path: 'T) (value: 'T) : UpdateOp = invalidOp "SET operation reserved for quoted update expressions." 255 | 256 | /// Removes a record attribute path from entry 257 | let REMOVE (path: 'T) : UpdateOp = invalidOp "REMOVE operation reserved for quoted update expressions." 258 | 259 | /// Adds given set of values to set attribute path 260 | let ADD (path: Set<'T>) (values: seq<'T>) : UpdateOp = invalidOp "ADD operation reserved for quoted update expressions." 261 | 262 | /// Deletes given set of values to set attribute path 263 | let DELETE (path: Set<'T>) (values: seq<'T>) : UpdateOp = invalidOp "DELETE operation reserved for quoted update expressions." 
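(* Illustrative usage sketch, not part of the library source: shows where the conditional and
   update operators defined above are intended to appear, namely inside quoted expressions handed
   to a TableContext. The record type, table key values and field names are invented for this
   example; the members used (Query, UpdateItem, Template.PrecomputeConditionalExpr,
   TableKey.Combined) follow the patterns demonstrated in Script.fsx and the test suite, with the
   synchronous overloads coming from the FSharp.AWS.DynamoDB.Scripting module. *)
module private OperatorUsageSketch =
    open FSharp.AWS.DynamoDB.Scripting

    type ExampleEntry =
        { [<HashKey>] Id: string
          [<RangeKey>] Version: int64
          Count: int64 }

    let sketch (table: TableContext<ExampleEntry>) =
        // EXISTS/NOT_EXISTS/BETWEEN are placeholders: invoking them directly raises, as their
        // bodies above show; they are only interpreted inside quoted conditional expressions.
        // A precomputed condition such as ifNew can be supplied as a precondition to write
        // operations, as the Transaction.Put calls in MetricsCollectorTests.fs do.
        let ifNew = table.Template.PrecomputeConditionalExpr <@ fun (e: ExampleEntry) -> NOT_EXISTS e.Version @>

        // Key-condition query written as a quotation, mirroring the Query calls in Script.fsx.
        let matching = table.Query <@ fun (e: ExampleEntry) -> e.Id = "sample" && e.Version >= 5L @>

        // Record-update style update expression, the form used throughout Script.fsx; the
        // SET/REMOVE/ADD/DELETE operators above offer an operator-based alternative inside
        // quoted update expressions.
        let updated = table.UpdateItem(TableKey.Combined("sample", 5L), <@ fun (e: ExampleEntry) -> { e with Count = e.Count + 1L } @>)
        ifNew, matching, updated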
264 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Utils/DynamoUtils.fs: -------------------------------------------------------------------------------- 1 | [] 2 | module internal FSharp.AWS.DynamoDB.DynamoUtils 3 | 4 | open System.IO 5 | open System.Collections.Generic 6 | open System.Text.RegularExpressions 7 | 8 | open Amazon.DynamoDBv2.Model 9 | 10 | type AttributeValueComparer() = 11 | static let areEqualMemoryStreams (m: MemoryStream) (m': MemoryStream) = 12 | if m.Length <> m'.Length then 13 | false 14 | else 15 | m.ToArray() = m'.ToArray() 16 | 17 | static let areEqualResizeArrays (ra: ResizeArray<'T>) (ra': ResizeArray<'T>) = 18 | if ra.Count <> ra'.Count then 19 | false 20 | else 21 | let mutable areEqual = true 22 | let mutable i = 0 23 | while areEqual && i < ra.Count do 24 | areEqual <- ra.[i] = ra'.[i] 25 | i <- i + 1 26 | 27 | areEqual 28 | 29 | static let rec areEqualAttributeValues (av: AttributeValue) (av': AttributeValue) = 30 | if av.NULL then 31 | av'.NULL 32 | elif av.IsBOOLSet then 33 | av'.IsBOOLSet && av.BOOL = av'.BOOL 34 | elif notNull av.S then 35 | notNull av'.S && av.S = av'.S 36 | elif notNull av.N then 37 | notNull av'.N && av.N = av'.N 38 | elif notNull av.B then 39 | notNull av'.B && areEqualMemoryStreams av.B av'.B 40 | elif av.SS.Count > 0 then 41 | av'.SS.Count > 0 && areEqualResizeArrays av.SS av'.SS 42 | elif av.NS.Count > 0 then 43 | av'.NS.Count > 0 && areEqualResizeArrays av.NS av'.NS 44 | elif av.BS.Count > 0 then 45 | av'.BS.Count > 0 && av.BS.Count = av'.BS.Count && Seq.forall2 areEqualMemoryStreams av.BS av'.BS 46 | elif av.IsLSet then 47 | av'.IsLSet && av.L.Count = av'.L.Count && Seq.forall2 areEqualAttributeValues av.L av'.L 48 | elif av.IsMSet then 49 | av'.IsMSet 50 | && av.M.Count = av'.M.Count 51 | && av.M 52 | |> Seq.forall (fun kv -> 53 | let ok, found = av'.M.TryGetValue kv.Key 54 | if ok then areEqualAttributeValues kv.Value found else false) 55 | else 56 | true 57 | 58 | static let getSeqHash (eh: 'T -> int) (ts: seq<'T>) = 59 | let mutable h = 13 60 | for t in ts do 61 | h <- combineHash h (eh t) 62 | h 63 | 64 | static let rec getAttributeValueHashCode (av: AttributeValue) = 65 | if av.NULL then 66 | 0 67 | elif av.IsBOOLSet then 68 | hash av.BOOL 69 | elif notNull av.S then 70 | hash av.S 71 | elif notNull av.N then 72 | hash av.N 73 | elif notNull av.B then 74 | hash av.B.Length 75 | elif av.SS.Count > 0 then 76 | getSeqHash hash av.SS 77 | elif av.NS.Count > 0 then 78 | getSeqHash hash av.NS 79 | elif av.BS.Count > 0 then 80 | av.BS |> getSeqHash (fun m -> hash m.Length) 81 | elif av.IsLSet then 82 | getSeqHash getAttributeValueHashCode av.L 83 | elif av.IsMSet then 84 | av.M |> getSeqHash (fun kv -> hash2 kv.Key (getAttributeValueHashCode kv.Value)) 85 | else 86 | -1 87 | 88 | static member Equals(l, r) = areEqualAttributeValues l r 89 | static member GetHashCode av = getAttributeValueHashCode av 90 | 91 | interface IEqualityComparer with 92 | member __.Equals(l, r) = areEqualAttributeValues l r 93 | member __.GetHashCode av = getAttributeValueHashCode av 94 | 95 | /// Struct AttributeValue wrapper with modified equality semantics 96 | [] 97 | type AttributeValueEqWrapper(av: AttributeValue) = 98 | member __.AttributeValue = av 99 | override __.Equals(o) = 100 | match o with 101 | | :? 
AttributeValueEqWrapper as av' -> AttributeValueComparer.Equals(av, av') 102 | | _ -> false 103 | 104 | override __.GetHashCode() = AttributeValueComparer.GetHashCode av 105 | 106 | let inline wrap av = new AttributeValueEqWrapper(av) 107 | let inline unwrap (avw: AttributeValueEqWrapper) = avw.AttributeValue 108 | 109 | type AttributeValue with 110 | 111 | member inline av.IsSSSet = av.SS.Count > 0 112 | member inline av.IsNSSet = av.NS.Count > 0 113 | member inline av.IsBSSet = av.BS.Count > 0 114 | 115 | member av.Print() = 116 | if av.NULL then 117 | "{ NULL = true }" 118 | elif av.IsBOOLSet then 119 | sprintf "{ BOOL = %b }" av.BOOL 120 | elif av.S <> null then 121 | sprintf "{ S = %s }" av.S 122 | elif av.N <> null then 123 | sprintf "{ N = %s }" av.N 124 | elif av.B <> null then 125 | sprintf "{ N = %A }" (av.B.ToArray()) 126 | elif av.SS.Count > 0 then 127 | sprintf "{ SS = %A }" (Seq.toArray av.SS) 128 | elif av.NS.Count > 0 then 129 | sprintf "{ SN = %A }" (Seq.toArray av.NS) 130 | elif av.BS.Count > 0 then 131 | av.BS |> Seq.map (fun bs -> bs.ToArray()) |> Seq.toArray |> sprintf "{ BS = %A }" 132 | elif av.IsLSet then 133 | av.L |> Seq.map (fun av -> av.Print()) |> Seq.toArray |> sprintf "{ L = %A }" 134 | elif av.IsMSet then 135 | av.M |> Seq.map (fun kv -> (kv.Key, kv.Value.Print())) |> Seq.toArray |> sprintf "{ M = %A }" 136 | else 137 | "{ }" 138 | 139 | // DynamoDB Name limitations, see: 140 | // http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html 141 | let private tableNameRegex = Regex("^[\w\-_\.]*$", RegexOptions.Compiled) 142 | let isValidTableName (tableName: string) = 143 | if tableName.Length < 3 || tableName.Length > 255 then false 144 | elif not <| tableNameRegex.IsMatch tableName then false 145 | else true 146 | 147 | let private utf8Length (str: string) = System.Text.Encoding.UTF8.GetBytes(str).Length 148 | 149 | let isValidFieldName (name: string) = name <> null && name.Length > 0 && utf8Length name <= 65535 150 | 151 | let isValidKeyName (name: string) = name <> null && name.Length > 0 && utf8Length name <= 255 152 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/Utils/Utils.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB 2 | 3 | open System 4 | open System.Collections.Generic 5 | open System.Reflection 6 | open System.Threading.Tasks 7 | open System.Text 8 | 9 | open Microsoft.FSharp.Quotations 10 | open Microsoft.FSharp.Quotations.Patterns 11 | open Microsoft.FSharp.Quotations.DerivedPatterns 12 | 13 | [] 14 | module internal Utils = 15 | 16 | let inline rlist (ts: seq<'T>) = ResizeArray<_>(ts) 17 | 18 | let inline keyVal k v = KeyValuePair<_, _>(k, v) 19 | 20 | let inline cdict (kvs: seq>) = 21 | let d = new Dictionary<'K, 'V>() 22 | for kv in kvs do 23 | d.Add(kv.Key, kv.Value) 24 | d 25 | 26 | let inline isNull o = obj.ReferenceEquals(o, null) 27 | let inline notNull o = not <| obj.ReferenceEquals(o, null) 28 | 29 | /// taken from mscorlib's Tuple.GetHashCode() implementation 30 | let inline combineHash (h1: int) (h2: int) = ((h1 <<< 5) + h1) ^^^ h2 31 | 32 | /// pair hashcode generation without tuple allocation 33 | let inline hash2 (t: 'T) (s: 'S) = combineHash (hash t) (hash s) 34 | 35 | /// triple hashcode generation without tuple allocation 36 | let inline hash3 (t: 'T) (s: 'S) (u: 'U) = combineHash (combineHash (hash t) (hash s)) (hash u) 37 | 38 | /// quadruple hashcode generation without tuple 
allocation 39 | let inline hash4 (t: 'T) (s: 'S) (u: 'U) (v: 'V) = combineHash (combineHash (combineHash (hash t) (hash s)) (hash u)) (hash v) 40 | 41 | let inline mkString (builder: (string -> unit) -> unit) : string = 42 | let sb = StringBuilder() 43 | builder (fun s -> sb.Append s |> ignore) 44 | sb.ToString() 45 | 46 | let tryGetAttribute<'Attribute when 'Attribute :> Attribute> (attrs: seq) : 'Attribute option = 47 | attrs 48 | |> Seq.tryPick (function 49 | | :? 'Attribute as a -> Some a 50 | | _ -> None) 51 | 52 | let getAttributes<'Attribute when 'Attribute :> Attribute> (attrs: seq) : 'Attribute[] = 53 | attrs 54 | |> Seq.choose (function 55 | | :? 'Attribute as a -> Some a 56 | | _ -> None) 57 | |> Seq.toArray 58 | 59 | let containsAttribute<'Attribute when 'Attribute :> Attribute> (attrs: seq) : bool = 60 | attrs |> Seq.exists (fun a -> a :? 'Attribute) 61 | 62 | [] 63 | module List = 64 | let rec last (ts: 'T list) = 65 | match ts with 66 | | [] -> invalidArg "ts" "list is empty" 67 | | [ t ] -> t 68 | | _ :: tail -> last tail 69 | 70 | type MemberInfo with 71 | 72 | member m.TryGetAttribute<'Attribute when 'Attribute :> Attribute>() : 'Attribute option = 73 | m.GetCustomAttributes(true) |> Seq.map unbox |> tryGetAttribute 74 | 75 | member m.GetAttributes<'Attribute when 'Attribute :> Attribute>() : 'Attribute[] = 76 | m.GetCustomAttributes(true) |> Seq.map unbox |> getAttributes 77 | 78 | member m.ContainsAttribute<'Attribute when 'Attribute :> Attribute>() : bool = 79 | m.GetCustomAttributes(true) |> Seq.map unbox |> containsAttribute 80 | 81 | type MethodInfo with 82 | 83 | /// Gets the underlying method definition 84 | /// including the supplied declaring type and method type arguments 85 | member m.GetUnderlyingMethodDefinition() : MethodInfo * Type[] * Type[] = 86 | let dt = m.DeclaringType 87 | if dt.IsGenericType then 88 | let gt = dt.GetGenericTypeDefinition() 89 | let gas = dt.GetGenericArguments() 90 | let mas = m.GetGenericArguments() 91 | 92 | let bindingFlags = 93 | BindingFlags.Public 94 | ||| BindingFlags.NonPublic 95 | ||| BindingFlags.Static 96 | ||| BindingFlags.Instance 97 | ||| BindingFlags.FlattenHierarchy 98 | 99 | let m = 100 | gt.GetMethods(bindingFlags) 101 | |> Array.find (fun m' -> m.Name = m'.Name && m.MetadataToken = m'.MetadataToken) 102 | 103 | m, gas, mas 104 | 105 | elif m.IsGenericMethod then 106 | let mas = m.GetGenericArguments() 107 | m.GetGenericMethodDefinition(), [||], mas 108 | 109 | else 110 | m, [||], [||] 111 | 112 | type PropertyInfo with 113 | 114 | member p.GetUnderlyingProperty() : PropertyInfo * Type[] = 115 | let dt = p.DeclaringType 116 | if dt.IsGenericType then 117 | let gt = dt.GetGenericTypeDefinition() 118 | let gas = dt.GetGenericArguments() 119 | 120 | let bindingFlags = 121 | BindingFlags.Public 122 | ||| BindingFlags.NonPublic 123 | ||| BindingFlags.Static 124 | ||| BindingFlags.Instance 125 | ||| BindingFlags.FlattenHierarchy 126 | 127 | let gp = gt.GetProperty(p.Name, bindingFlags) 128 | 129 | gp, gas 130 | else 131 | p, [||] 132 | 133 | type Expr with 134 | 135 | member e.IsClosed = e.GetFreeVars() |> Seq.isEmpty 136 | member e.Substitute(v: Var, sub: Expr) = e.Substitute(fun w -> if v = w then Some sub else None) 137 | 138 | type Environment with 139 | 140 | /// 141 | /// Resolves an environment variable from the local machine. 142 | /// Variables are resolved using the following target order: 143 | /// Process, User and finally, Machine. 144 | /// 145 | /// Environment variable name. 
146 | static member ResolveEnvironmentVariable(variableName: string) = 147 | let aux found target = 148 | if String.IsNullOrWhiteSpace found then 149 | Environment.GetEnvironmentVariable(variableName, target) 150 | else 151 | found 152 | 153 | Array.fold 154 | aux 155 | null 156 | [| EnvironmentVariableTarget.Process 157 | EnvironmentVariableTarget.User 158 | EnvironmentVariableTarget.Machine |] 159 | 160 | /// Variations of DerivedPatterns.SpecificCall which correctly 161 | /// recognizes methods of generic types 162 | /// See also https://github.com/fsharp/fsharp/issues/546 163 | let (|SpecificCall2|_|) (pattern: Expr) = 164 | match pattern with 165 | | Lambdas(_, Call(_, mI, _)) 166 | | Call(_, mI, _) -> 167 | let gm, _, _ = mI.GetUnderlyingMethodDefinition() 168 | 169 | fun (input: Expr) -> 170 | match input with 171 | | Call(obj, mI', args) -> 172 | let gm', ta, ma = mI'.GetUnderlyingMethodDefinition() 173 | if gm = gm' then 174 | Some(obj, Array.toList ta, Array.toList ma, args) 175 | else 176 | None 177 | | _ -> None 178 | 179 | | _ -> invalidArg "pattern" "supplied pattern is not a method call" 180 | 181 | let (|SpecificProperty|_|) (pattern: Expr) = 182 | match pattern with 183 | | Lambdas(_, PropertyGet(_, pI, _)) 184 | | PropertyGet(_, pI, _) -> 185 | let gp, _ = pI.GetUnderlyingProperty() 186 | 187 | fun (input: Expr) -> 188 | match input with 189 | | PropertyGet(obj, pI', args) -> 190 | let gp', ta = pI'.GetUnderlyingProperty() 191 | if gp' = gp then Some(obj, Array.toList ta, args) else None 192 | | _ -> None 193 | 194 | | _ -> invalidArg "pattern" "supplied pattern is not a property getter" 195 | 196 | let (|IndexGet|_|) (e: Expr) = 197 | match e with 198 | | SpecificCall2 <@ LanguagePrimitives.IntrinsicFunctions.GetArray @> (None, _, [ t ], [ obj; index ]) -> Some(obj, t, index) 199 | | PropertyGet(Some obj, prop, [ index ]) when prop.Name = "Item" -> Some(obj, prop.PropertyType, index) 200 | | _ -> None 201 | 202 | let (|PipeLeft|_|) (e: Expr) = 203 | match e with 204 | | SpecificCall2 <@ (<|) @> (None, _, _, [ func; arg ]) -> 205 | let rec unwind (body: Expr) = 206 | match body with 207 | | Let(x, value, body) -> unwind (body.Substitute(x, value)) 208 | | Lambda(v, body) -> Some <| body.Substitute(v, arg) 209 | | _ -> None 210 | 211 | unwind func 212 | | _ -> None 213 | 214 | let (|PipeRight|_|) (e: Expr) = 215 | match e with 216 | | SpecificCall2 <@ (|>) @> (None, _, _, [ left; right ]) -> 217 | let rec unwind (body: Expr) = 218 | match body with 219 | | Let(x, value, body) -> unwind (body.Substitute(x, value)) 220 | | Lambda(x, body) -> Some <| body.Substitute(x, left) 221 | | _ -> None 222 | 223 | unwind right 224 | | _ -> None 225 | 226 | let (|ConsList|_|) (e: Expr) = 227 | match e with 228 | | NewUnionCase(uci, [ h; t ]) -> 229 | let dt = uci.DeclaringType 230 | if dt.IsGenericType && dt.GetGenericTypeDefinition() = typedefof<_ list> then 231 | Some(h, t) 232 | else 233 | None 234 | | _ -> None 235 | 236 | let (|OptionSome|_|) (e: Expr) = 237 | match e with 238 | | NewUnionCase(uci, [ v ]) -> 239 | let dt = uci.DeclaringType 240 | if dt.IsGenericType && dt.GetGenericTypeDefinition() = typedefof<_ option> && uci.Name = (nameof Some) then 241 | Some v 242 | else 243 | None 244 | | _ -> None 245 | 246 | type Async with 247 | 248 | /// Raise an exception 249 | static member Raise e = Async.FromContinuations(fun (_, ec, _) -> ec e) 250 | 251 | (* Direct copies of canonical implementation at http://www.fssnip.net/7Rc/title/AsyncAwaitTaskCorrect 252 | pending that being 
officially packaged somewhere or integrated into FSharp.Core https://github.com/fsharp/fslang-suggestions/issues/840 *) 253 | 254 | /// 255 | /// Gets the result of given task so that in the event of exception 256 | /// the actual user exception is raised as opposed to being wrapped 257 | /// in a System.AggregateException. 258 | /// 259 | /// Task to be awaited. 260 | [] 261 | static member AwaitTaskCorrect(task: Task<'T>) : Async<'T> = 262 | Async.FromContinuations(fun (sc, ec, _cc) -> 263 | task.ContinueWith(fun (t: Task<'T>) -> 264 | if t.IsFaulted then 265 | let e = t.Exception 266 | if e.InnerExceptions.Count = 1 then 267 | ec e.InnerExceptions[0] 268 | else 269 | ec e 270 | elif t.IsCanceled then 271 | ec (TaskCanceledException()) 272 | else 273 | sc t.Result) 274 | |> ignore) 275 | 276 | /// 277 | /// Gets the result of given task so that in the event of exception 278 | /// the actual user exception is raised as opposed to being wrapped 279 | /// in a System.AggregateException. 280 | /// 281 | /// Task to be awaited. 282 | [] 283 | static member AwaitTaskCorrect(task: Task) : Async = 284 | Async.FromContinuations(fun (sc, ec, _cc) -> 285 | task.ContinueWith(fun (task: Task) -> 286 | if task.IsFaulted then 287 | let e = task.Exception 288 | if e.InnerExceptions.Count = 1 then 289 | ec e.InnerExceptions[0] 290 | else 291 | ec e 292 | elif task.IsCanceled then 293 | ec (TaskCanceledException()) 294 | else 295 | sc ()) 296 | |> ignore) 297 | 298 | [] 299 | module Seq = 300 | let joinBy (pred: 'T -> 'S -> bool) (ts: seq<'T>) (ss: seq<'S>) : seq<'T * 'S> = seq { 301 | for t in ts do 302 | for s in ss do 303 | if pred t s then 304 | yield (t, s) 305 | } 306 | 307 | /// Gets the home path for the current user 308 | let getHomePath () = 309 | match Environment.OSVersion.Platform with 310 | | PlatformID.Unix 311 | | PlatformID.MacOSX -> Environment.GetEnvironmentVariable "HOME" 312 | | _ -> Environment.ExpandEnvironmentVariables "%HOMEDRIVE%%HOMEPATH%" 313 | 314 | [] 315 | module ResizeArray = 316 | let mapToArray f (list: ResizeArray<_>) = 317 | let newList = Array.zeroCreate list.Count 318 | for i in 0 .. 
list.Count - 1 do 319 | newList.[i] <- f list.[i] 320 | newList 321 | -------------------------------------------------------------------------------- /src/FSharp.AWS.DynamoDB/paket.references: -------------------------------------------------------------------------------- 1 | AWSSDK.DynamoDBv2 2 | DotNet.ReproducibleBuilds 3 | FSharp.Core 4 | Unquote 5 | File: TypeShape.fs TypeShape 6 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/FSharp.AWS.DynamoDB.Tests.fsproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | net8.0 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/MetricsCollectorTests.fs: -------------------------------------------------------------------------------- 1 | module FSharp.AWS.DynamoDB.Tests.MetricsCollector 2 | 3 | open System 4 | 5 | open Swensen.Unquote 6 | open Xunit 7 | 8 | open FSharp.AWS.DynamoDB 9 | open FSharp.AWS.DynamoDB.Scripting 10 | 11 | open Amazon.DynamoDBv2.Model 12 | 13 | type MetricsRecord = 14 | { [] 15 | HashKey: string 16 | [] 17 | RangeKey: int 18 | 19 | [] 20 | LocalSecondaryRangeKey: string 21 | 22 | [] 23 | SecondaryHashKey: string 24 | [] 25 | SecondaryRangeKey: int 26 | 27 | LocalAttribute: int } 28 | 29 | let rand = let r = Random.Shared in fun () -> r.Next() |> int64 30 | let mkItem (hk: string) (gshk: string) (i: int) : MetricsRecord = 31 | { HashKey = hk 32 | RangeKey = i 33 | LocalSecondaryRangeKey = guid () 34 | SecondaryHashKey = gshk 35 | SecondaryRangeKey = i 36 | LocalAttribute = int (rand () % 2L) } 37 | 38 | type TestCollector() = 39 | 40 | let metrics = ResizeArray() 41 | 42 | member _.Collect(m: RequestMetrics) = metrics.Add m 43 | 44 | member _.Metrics = metrics |> Seq.toList 45 | 46 | member _.Clear() = metrics.Clear() 47 | 48 | let (|TotalCu|): ConsumedCapacity list -> float = Seq.sumBy (fun c -> c.CapacityUnits) 49 | 50 | /// Tests without common setup 51 | type Tests(fixture: TableFixture) = 52 | 53 | let rawTable = fixture.CreateEmpty() 54 | 55 | let (|ExpectedTableName|_|) name = if name = fixture.TableName then Some() else None 56 | 57 | let collector = TestCollector() 58 | let sut = rawTable.WithMetricsCollector(collector.Collect) 59 | 60 | [] 61 | let ``Collect Metrics on TryGetItem`` () = async { 62 | let! result = 63 | let nonExistentHk = guid () 64 | sut.TryGetItemAsync(key = TableKey.Combined(nonExistentHk, 0)) 65 | None =! 
result 66 | 67 | test 68 | <@ 69 | match collector.Metrics with 70 | | [ { ItemCount = 0 71 | Operation = GetItem 72 | TableName = ExpectedTableName 73 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 74 | | _ -> false 75 | @> 76 | } 77 | 78 | [] 79 | let ``Collect Metrics on PutItem`` () = 80 | let item = mkItem (guid ()) (guid ()) 0 81 | let _ = sut.PutItem item 82 | 83 | test 84 | <@ 85 | match collector.Metrics with 86 | | [ { ItemCount = 1 87 | Operation = PutItem 88 | TableName = ExpectedTableName 89 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 90 | | _ -> false 91 | @> 92 | 93 | let compile = rawTable.Template.PrecomputeConditionalExpr 94 | 95 | [] 96 | let ``Collect Metrics on Transactional PutItem`` () = async { 97 | let item = mkItem (guid ()) (guid ()) 0 98 | let _ = sut.PutItem item 99 | let simpleCu = 100 | trap 101 | <@ 102 | match collector.Metrics with 103 | | [ { ConsumedCapacity = TotalCu cu } ] -> cu 104 | | x -> failwithf "Unexpected %A" x 105 | @> 106 | collector.Clear() 107 | 108 | let item = mkItem (guid ()) (guid ()) 0 109 | let transaction = Transaction(sut.Client, collector.Collect) 110 | transaction.Put(sut, item, compile <@ fun t -> NOT_EXISTS t.RangeKey @>) 111 | do! transaction.TransactWriteItems() 112 | 113 | test 114 | <@ 115 | match collector.Metrics with 116 | | [ { ItemCount = 1 117 | Operation = TransactWriteItems 118 | TableName = ExpectedTableName 119 | ConsumedCapacity = TotalCu cu } ] -> cu >= simpleCu * 2. // doing it transactionally costs at least double 120 | | _ -> false 121 | @> 122 | 123 | let! itemFound = sut.ContainsKeyAsync(sut.Template.ExtractKey item) 124 | test <@ itemFound @> 125 | } 126 | 127 | [] 128 | let ``No Metrics on Canceled PutItem`` () = async { 129 | let collector = TestCollector() 130 | let sut = rawTable.WithMetricsCollector(collector.Collect) 131 | 132 | let item = mkItem (guid ()) (guid ()) 0 133 | let transaction = rawTable.CreateTransaction() 134 | transaction.Put(sut, item, compile <@ fun t -> EXISTS t.RangeKey @>) 135 | let mutable failed = false 136 | try 137 | do! 138 | // The check will fail, which triggers a throw from the underlying AWS SDK; there's no way to extract the consumption info in that case 139 | transaction.TransactWriteItems() 140 | with Transaction.TransactionCanceledConditionalCheckFailed -> 141 | failed <- true 142 | true =! failed 143 | [] =! collector.Metrics 144 | } 145 | 146 | interface IClassFixture 147 | 148 | /// Tests that look up a specific item. 
Each test run gets a fresh individual item 149 | type ItemTests(fixture: TableFixture) = 150 | 151 | let rawTable = fixture.CreateEmpty() 152 | let (|ExpectedTableName|_|) name = if name = fixture.TableName then Some() else None 153 | 154 | let item = mkItem (guid ()) (guid ()) 0 155 | do rawTable.PutItem item |> ignore 156 | 157 | let collector = TestCollector() 158 | let sut = rawTable.WithMetricsCollector(collector.Collect) 159 | 160 | [] 161 | let ``Collect Metrics on GetItem`` () = 162 | let _ = sut.GetItem(key = TableKey.Combined(item.HashKey, 0)) 163 | 164 | test 165 | <@ 166 | match collector.Metrics with 167 | | [ { ItemCount = 1 168 | Operation = GetItem 169 | TableName = ExpectedTableName 170 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 171 | | _ -> false 172 | @> 173 | 174 | [] 175 | let ``Collect Metrics on ContainsKey`` () = 176 | let _ = sut.ContainsKey(key = TableKey.Combined(item.HashKey, 0)) 177 | 178 | test 179 | <@ 180 | match collector.Metrics with 181 | | [ { ItemCount = 1 182 | Operation = GetItem 183 | TableName = ExpectedTableName 184 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 185 | | _ -> false 186 | @> 187 | 188 | [] 189 | let ``Collect Metrics on UpdateItem`` () = 190 | let _ = 191 | sut.UpdateItem(TableKey.Combined(item.HashKey, item.RangeKey), <@ fun (i: MetricsRecord) -> { i with LocalAttribute = 1000 } @>) 192 | 193 | test 194 | <@ 195 | match collector.Metrics with 196 | | [ { ItemCount = 1 197 | Operation = UpdateItem 198 | TableName = ExpectedTableName 199 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 200 | | _ -> false 201 | @> 202 | 203 | [] 204 | let ``Collect Metrics on DeleteItem`` () = 205 | let _ = sut.DeleteItem(TableKey.Combined(item.HashKey, item.RangeKey)) 206 | 207 | test 208 | <@ 209 | match collector.Metrics with 210 | | [ { ItemCount = 1 211 | Operation = DeleteItem 212 | TableName = ExpectedTableName 213 | ConsumedCapacity = TotalCu cu } ] -> cu > 0 214 | | _ -> false 215 | @> 216 | 217 | interface IClassFixture 218 | 219 | /// Heavy tests reliant on establishing (and mutating) multiple items. 
Separate Test Class so Xunit will run them in parallel with others 220 | type BulkMutationTests(fixture: TableFixture) = 221 | 222 | let rawTable = fixture.CreateEmpty() 223 | let (|ExpectedTableName|_|) name = if name = fixture.TableName then Some() else None 224 | 225 | // NOTE we mutate the items so they need to be established each time 226 | let items = 227 | let hk, gsk = guid (), guid () 228 | [| for i in 0..24 -> mkItem hk gsk i |] 229 | do 230 | for item in items do 231 | rawTable.PutItem item |> ignore 232 | 233 | let collector = TestCollector() 234 | let sut = rawTable.WithMetricsCollector(collector.Collect) 235 | 236 | [] 237 | let ``Collect Metrics on BatchPutItem`` () = 238 | let _results = sut.BatchPutItems(items |> Seq.map (fun i -> { i with LocalAttribute = 1000 })) 239 | 240 | test 241 | <@ 242 | match collector.Metrics with 243 | | [ { ItemCount = c 244 | Operation = BatchWriteItems 245 | TableName = ExpectedTableName 246 | ConsumedCapacity = TotalCu cu } ] -> c = items.Length && cu > 0 247 | | _ -> false 248 | @> 249 | 250 | [] 251 | let ``Collect Metrics on BatchDeleteItem`` () = 252 | let _keys = sut.BatchDeleteItems(items |> Seq.map (fun i -> TableKey.Combined(i.HashKey, i.RangeKey))) 253 | 254 | test 255 | <@ 256 | match collector.Metrics with 257 | | [ { ItemCount = c 258 | Operation = BatchWriteItems 259 | TableName = ExpectedTableName 260 | ConsumedCapacity = TotalCu cu } ] -> c = items.Length && cu > 0 261 | | _ -> false 262 | @> 263 | 264 | interface IClassFixture 265 | 266 | /// TableFixture with 1000 items with a known HashKey pre-inserted 267 | type ManyReadOnlyItemsFixture() = 268 | inherit TableFixture() 269 | 270 | // TOCONSIDER shift this into IAsyncLifetime.InitializeAsync 271 | let table = base.CreateEmpty() 272 | 273 | let hk = guid () 274 | do 275 | let gsk = guid () 276 | let items = [| for i in 0..99 -> mkItem hk gsk i |] 277 | for item in items do 278 | table.PutItem item |> ignore 279 | 280 | member _.Table = table 281 | member _.HashKey = hk 282 | 283 | /// NOTE These tests share the prep work of making a Table Containing lots of items to read 284 | // DO NOT add tests that will delete or mutate those items 285 | type ``Bulk Read Operations``(fixture: ManyReadOnlyItemsFixture) = 286 | 287 | let (|ExpectedTableName|_|) name = if name = fixture.TableName then Some() else None 288 | 289 | let collector = TestCollector() 290 | let sut = fixture.Table.WithMetricsCollector(collector.Collect) 291 | 292 | [] 293 | let ``Collect Metrics on Scan`` () = 294 | let items = sut.Scan() 295 | 296 | test 297 | <@ 298 | match collector.Metrics with 299 | | [ { ItemCount = c 300 | Operation = Scan 301 | TableName = ExpectedTableName 302 | ConsumedCapacity = TotalCu cu } ] -> c = items.Length && cu > 0 303 | | _ -> false 304 | @> 305 | 306 | [] 307 | let ``Collect Metrics on Query`` () = 308 | let items = sut.Query(<@ fun (r: MetricsRecord) -> r.HashKey = fixture.HashKey @>) 309 | 310 | test 311 | <@ 312 | match collector.Metrics with 313 | | [ { ItemCount = c 314 | Operation = Query 315 | TableName = ExpectedTableName 316 | ConsumedCapacity = TotalCu cu } ] -> c = items.Length && cu > 0 317 | | _ -> false 318 | @> 319 | 320 | [] 321 | let ``Collect Metrics on BatchGetItem`` () = 322 | let items = sut.BatchGetItems(seq { for i in 0..99 -> TableKey.Combined(fixture.HashKey, i) }) 323 | 324 | test 325 | <@ 326 | match collector.Metrics with 327 | | [ { ItemCount = c 328 | Operation = BatchGetItems 329 | TableName = ExpectedTableName 330 | ConsumedCapacity = TotalCu cu 
} ] -> c = items.Length && cu > 0 331 | | _ -> false 332 | @> 333 | 334 | interface IClassFixture 335 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/MultipleKeyAttributeTests.fs: -------------------------------------------------------------------------------- 1 | module FSharp.AWS.DynamoDB.Tests.MultipleKeyAttributeTests 2 | 3 | open System 4 | 5 | open Swensen.Unquote 6 | open Xunit 7 | 8 | open FSharp.AWS.DynamoDB 9 | open FSharp.AWS.DynamoDB.Scripting 10 | 11 | [] 12 | module MultiKeyTypes = 13 | 14 | type InverseKeyRecord = 15 | { [] 16 | [] 17 | PrimaryKey: string 18 | [] 19 | [] 20 | SortKey: string } 21 | 22 | type SharedRangeKeyRecord = 23 | { [] 24 | HashKey: string 25 | [] 26 | GSI1: string 27 | [] 28 | GSI2: string 29 | [] 30 | [] 31 | SortKey: string } 32 | 33 | type ``Inverse GSI Table Operation Tests``(fixture: TableFixture) = 34 | 35 | let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 36 | let mkItem () = 37 | { PrimaryKey = ((int (rand ())) % 50).ToString() 38 | SortKey = ((int (rand ())) % 50).ToString() } 39 | 40 | let table = fixture.CreateEmpty() 41 | 42 | [] 43 | let ``Query by Table Key and GSI`` () = 44 | let values = set [ for _ in 1L .. 1000L -> mkItem () ] 45 | for batch in values |> Set.toSeq |> Seq.chunkBySize 25 do 46 | table.BatchPutItems batch =! [||] 47 | let queriedTable = table.Query <@ fun (i: InverseKeyRecord) -> i.PrimaryKey = "1" && i.SortKey.StartsWith "2" @> 48 | test <@ set queriedTable = set (values |> Set.filter (fun i -> i.PrimaryKey = "1" && i.SortKey.StartsWith "2")) @> 49 | let queriedGSI = table.Query <@ fun (i: InverseKeyRecord) -> i.SortKey = "1" && i.PrimaryKey.StartsWith "2" @> 50 | test <@ set queriedGSI = set (values |> Set.filter (fun i -> i.SortKey = "1" && i.PrimaryKey.StartsWith "2")) @> 51 | 52 | interface IClassFixture 53 | 54 | type ``Shared Range Key Table Operation Tests``(fixture: TableFixture) = 55 | 56 | let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 57 | 58 | let mkItem () = 59 | { HashKey = guid () 60 | GSI1 = ((int (rand ())) % 5).ToString() 61 | GSI2 = ((int (rand ())) % 5 + 20).ToString() 62 | SortKey = ((int (rand ())) % 50).ToString() } 63 | 64 | let table = fixture.CreateEmpty() 65 | 66 | [] 67 | let ``Query by GSIs with shared range key`` () = 68 | let values = set [ for _ in 1L .. 1000L -> mkItem () ] 69 | for batch in values |> Set.toSeq |> Seq.chunkBySize 25 do 70 | table.BatchPutItems batch =! 
[||] 71 | let queried1 = table.Query <@ fun (i: SharedRangeKeyRecord) -> i.GSI1 = "1" && i.SortKey = "23" @> 72 | test <@ set queried1 = set (values |> Set.filter (fun i -> i.GSI1 = "1" && i.SortKey = "23")) @> 73 | let queried2 = table.Query <@ fun (i: SharedRangeKeyRecord) -> i.GSI2 = "2" && i.SortKey = "25" @> 74 | test <@ set queried2 = set (values |> Set.filter (fun i -> i.GSI2 = "2" && i.SortKey = "25")) @> 75 | 76 | interface IClassFixture 77 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/PaginationTests.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB.Tests 2 | 3 | open System 4 | 5 | open Swensen.Unquote 6 | open Xunit 7 | 8 | open FSharp.AWS.DynamoDB 9 | open FSharp.AWS.DynamoDB.Scripting 10 | 11 | [] 12 | module PaginationTests = 13 | 14 | type PaginationRecord = 15 | { [] 16 | HashKey: string 17 | [] 18 | RangeKey: string 19 | 20 | [] 21 | LocalSecondaryRangeKey: string 22 | 23 | [] 24 | SecondaryHashKey: string 25 | [] 26 | SecondaryRangeKey: string 27 | 28 | LocalAttribute: int } 29 | 30 | 31 | type ``Pagination Tests``(fixture: TableFixture) = 32 | 33 | let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 34 | let mkItem (hk: string) (gshk: string) : PaginationRecord = 35 | { HashKey = hk 36 | RangeKey = guid () 37 | LocalSecondaryRangeKey = guid () 38 | SecondaryHashKey = gshk 39 | SecondaryRangeKey = guid () 40 | LocalAttribute = int (rand () % 2L) } 41 | 42 | let table = fixture.CreateEmpty() 43 | 44 | [] 45 | let ``Paginated Query on Primary Key`` () = 46 | let hk = guid () 47 | let gsk = guid () 48 | let items = seq { for _ in 0..9 -> mkItem hk gsk } |> Seq.toArray |> Array.sortBy (fun r -> r.RangeKey) 49 | for item in items do 50 | table.PutItem item =! TableKey.Combined(item.HashKey, item.RangeKey) 51 | let res1 = table.QueryPaginated(<@ fun r -> r.HashKey = hk @>, limit = 5) 52 | let res2 = table.QueryPaginated(<@ fun r -> r.HashKey = hk @>, limit = 5, ?exclusiveStartKey = res1.LastEvaluatedKey) 53 | let res3 = table.QueryPaginated(<@ fun r -> r.HashKey = hk @>, limit = 5, ?exclusiveStartKey = res2.LastEvaluatedKey) 54 | test <@ None <> res1.LastEvaluatedKey && None <> res2.LastEvaluatedKey && None = res3.LastEvaluatedKey @> 55 | test <@ items = Array.append res1.Records res2.Records && Array.isEmpty res3.Records @> 56 | 57 | [] 58 | let ``Paginated Query on LSI`` () = 59 | let hk = guid () 60 | let gsk = guid () 61 | let items = 62 | seq { for _ in 0..9 -> mkItem hk gsk } 63 | |> Seq.toArray 64 | |> Array.sortBy (fun r -> r.LocalSecondaryRangeKey) 65 | for item in items do 66 | table.PutItem item =! 
TableKey.Combined(item.HashKey, item.RangeKey) 67 | let res1 = table.QueryPaginated(<@ fun r -> r.HashKey = hk && r.LocalSecondaryRangeKey > "0" @>, limit = 5) 68 | let res2 = 69 | table.QueryPaginated( 70 | <@ fun r -> r.HashKey = hk && r.LocalSecondaryRangeKey > "0" @>, 71 | limit = 5, 72 | ?exclusiveStartKey = res1.LastEvaluatedKey 73 | ) 74 | let res3 = 75 | table.QueryPaginated( 76 | <@ fun r -> r.HashKey = hk && r.LocalSecondaryRangeKey > "0" @>, 77 | limit = 5, 78 | ?exclusiveStartKey = res2.LastEvaluatedKey 79 | ) 80 | test <@ None <> res1.LastEvaluatedKey && None <> res2.LastEvaluatedKey && None = res3.LastEvaluatedKey @> 81 | test <@ items = Array.append res1.Records res2.Records && Array.isEmpty res3.Records @> 82 | 83 | [] 84 | let ``Paginated Query on GSI`` () = 85 | let hk = guid () 86 | let gsk = guid () 87 | let items = seq { for _ in 0..9 -> mkItem hk gsk } |> Seq.toArray |> Array.sortBy (fun r -> r.SecondaryRangeKey) 88 | for item in items do 89 | table.PutItem item =! TableKey.Combined(item.HashKey, item.RangeKey) 90 | let res1 = table.QueryPaginated(<@ fun r -> r.SecondaryHashKey = gsk @>, limit = 5) 91 | let res2 = table.QueryPaginated(<@ fun r -> r.SecondaryHashKey = gsk @>, limit = 5, ?exclusiveStartKey = res1.LastEvaluatedKey) 92 | let res3 = table.QueryPaginated(<@ fun r -> r.SecondaryHashKey = gsk @>, limit = 5, ?exclusiveStartKey = res2.LastEvaluatedKey) 93 | test <@ None <> res1.LastEvaluatedKey && None <> res2.LastEvaluatedKey && None = res3.LastEvaluatedKey @> 94 | test <@ items = Array.append res1.Records res2.Records && Array.isEmpty res3.Records @> 95 | 96 | [] 97 | let ``Paginated Query with filter`` () = 98 | let hk = guid () 99 | let gsk = guid () 100 | let items = seq { for _ in 0..49 -> mkItem hk gsk } |> Seq.toArray |> Array.sortBy (fun r -> r.RangeKey) 101 | for item in items do 102 | table.PutItem item =! 
TableKey.Combined(item.HashKey, item.RangeKey) 103 | let res = table.QueryPaginated(<@ fun r -> r.HashKey = hk @>, filterCondition = <@ fun r -> r.LocalAttribute = 0 @>, limit = 5) 104 | test <@ items |> Array.filter (fun r -> r.LocalAttribute = 0) |> Array.take 5 = res.Records @> 105 | 106 | interface IClassFixture 107 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/ProjectionExpressionTests.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB.Tests 2 | 3 | open System 4 | 5 | open Microsoft.FSharp.Quotations 6 | 7 | open Swensen.Unquote 8 | open Xunit 9 | 10 | open FSharp.AWS.DynamoDB 11 | open FSharp.AWS.DynamoDB.Scripting 12 | 13 | [] 14 | module ProjectionExprTypes = 15 | 16 | [] 17 | type Enum = 18 | | A = 1 19 | | B = 2 20 | | C = 4 21 | 22 | type Nested = { NV: string; NE: Enum } 23 | 24 | type Union = 25 | | UA of int64 26 | | UB of string 27 | 28 | type ProjectionExprRecord = 29 | { [] 30 | HashKey: string 31 | [] 32 | RangeKey: string 33 | 34 | Value: int64 35 | 36 | String: string 37 | 38 | Tuple: int64 * int64 39 | 40 | Nested: Nested 41 | 42 | NestedList: Nested list 43 | 44 | TimeSpan: TimeSpan 45 | 46 | DateTimeOffset: DateTimeOffset 47 | 48 | Guid: Guid 49 | 50 | Bool: bool 51 | 52 | Bytes: byte[] 53 | 54 | Ref: string ref 55 | 56 | Union: Union 57 | 58 | Unions: Union list 59 | 60 | Optional: string option 61 | 62 | List: int64 list 63 | 64 | Map: Map 65 | 66 | IntSet: Set 67 | 68 | StringSet: Set 69 | 70 | ByteSet: Set } 71 | 72 | type R = ProjectionExprRecord 73 | 74 | type ``Projection Expression Tests``(fixture: TableFixture) = 75 | 76 | static let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 77 | 78 | let bytes () = Guid.NewGuid().ToByteArray() 79 | let mkItem () = 80 | { HashKey = guid () 81 | RangeKey = guid () 82 | String = guid () 83 | Value = rand () 84 | Tuple = rand (), rand () 85 | TimeSpan = TimeSpan.FromTicks(rand ()) 86 | DateTimeOffset = DateTimeOffset.Now 87 | Guid = Guid.NewGuid() 88 | Bool = false 89 | Optional = Some(guid ()) 90 | Ref = ref (guid ()) 91 | Bytes = Guid.NewGuid().ToByteArray() 92 | Nested = { NV = guid (); NE = enum (int (rand ()) % 3) } 93 | NestedList = [ { NV = guid (); NE = enum (int (rand ()) % 3) } ] 94 | Map = seq { for _ in 0L .. rand () % 5L -> "K" + guid (), rand () } |> Map.ofSeq 95 | IntSet = seq { for _ in 0L .. rand () % 5L -> rand () } |> Set.ofSeq 96 | StringSet = seq { for _ in 0L .. rand () % 5L -> guid () } |> Set.ofSeq 97 | ByteSet = seq { for _ in 0L .. rand () % 5L -> bytes () } |> Set.ofSeq 98 | List = [ for _ in 0L .. rand () % 5L -> rand () ] 99 | Union = if rand () % 2L = 0L then UA(rand ()) else UB(guid ()) 100 | Unions = [ for _ in 0L .. 
rand () % 5L -> if rand () % 2L = 0L then UA(rand ()) else UB(guid ()) ] } 101 | 102 | let table = fixture.CreateEmpty() 103 | 104 | [] 105 | let ``Should fail on invalid projections`` () = 106 | let testProj (p: Expr 'T>) = 107 | fun () -> proj p 108 | |> shouldFailwith<_, ArgumentException> 109 | 110 | testProj <@ fun _ -> 1 @> 111 | testProj <@ fun _ -> Guid.Empty @> 112 | testProj <@ fun r -> not r.Bool @> 113 | testProj <@ fun r -> r.List[0] + 1L @> 114 | 115 | [] 116 | let ``Should fail on conflicting projections`` () = 117 | let testProj (p: Expr 'T>) = 118 | fun () -> proj p 119 | |> shouldFailwith<_, ArgumentException> 120 | 121 | testProj <@ fun r -> r.Bool, r.Bool @> 122 | testProj <@ fun r -> r.NestedList[0].NE, r.NestedList[0] @> 123 | 124 | [] 125 | let ``Null value projection`` () = 126 | let item = mkItem () 127 | let key = table.PutItem(item) 128 | table.GetItemProjected(key, <@ fun _ -> () @>) 129 | table.GetItemProjected(key, <@ ignore @>) 130 | 131 | [] 132 | let ``Single value projection`` () = 133 | let item = mkItem () 134 | let key = table.PutItem(item) 135 | let guid = table.GetItemProjected(key, <@ fun r -> r.Guid @>) 136 | test <@ item.Guid = guid @> 137 | 138 | [] 139 | let ``Map projection`` () = 140 | let item = mkItem () 141 | let key = table.PutItem(item) 142 | let map = table.GetItemProjected(key, <@ fun r -> r.Map @>) 143 | test <@ item.Map = map @> 144 | 145 | [] 146 | let ``Option-None projection`` () = 147 | let item = { mkItem () with Optional = None } 148 | let key = table.PutItem(item) 149 | let opt = table.GetItemProjected(key, <@ fun r -> r.Optional @>) 150 | test <@ None = opt @> 151 | 152 | [] 153 | let ``Option-Some projection`` () = 154 | let item = { mkItem () with Optional = Some "test" } 155 | let key = table.PutItem(item) 156 | let opt = table.GetItemProjected(key, <@ fun r -> r.Optional @>) 157 | test <@ item.Optional = opt @> 158 | 159 | [] 160 | let ``Multi-value projection`` () = 161 | let item = mkItem () 162 | let key = table.PutItem(item) 163 | let result = table.GetItemProjected(key, <@ fun r -> r.Bool, r.ByteSet, r.Bytes @>) 164 | test <@ (item.Bool, item.ByteSet, item.Bytes) = result @> 165 | 166 | [] 167 | let ``Nested value projection 1`` () = 168 | let item = { mkItem () with Map = Map.ofList [ "Nested", 42L ] } 169 | let key = table.PutItem(item) 170 | let result = table.GetItemProjected(key, <@ fun r -> r.Nested.NV, r.NestedList[0].NV, r.Map["Nested"] @>) 171 | test <@ (item.Nested.NV, item.NestedList[0].NV, item.Map["Nested"]) = result @> 172 | 173 | [] 174 | let ``Nested value projection 2`` () = 175 | let item = { mkItem () with List = [ 1L; 2L; 3L ] } 176 | let key = table.PutItem(item) 177 | let result = table.GetItemProjected(key, <@ fun r -> r.List[0], r.List[1] @>) 178 | test <@ (item.List[0], item.List[1]) = result @> 179 | 180 | [] 181 | let ``Projected query`` () = 182 | let hKey = guid () 183 | 184 | seq { for i in 1..200 -> { mkItem () with HashKey = hKey; RangeKey = string i } } 185 | |> Seq.splitInto 25 186 | |> Seq.map table.BatchPutItemsAsync 187 | |> Async.Parallel 188 | |> Async.Ignore 189 | |> Async.RunSynchronously 190 | 191 | let results = table.QueryProjected(<@ fun r -> r.HashKey = hKey @>, <@ fun r -> r.RangeKey @>) 192 | test <@ set [ 1..200 ] = (results |> Seq.map int |> set) @> 193 | 194 | [] 195 | let ``Projected scan`` () = 196 | let hKey = guid () 197 | 198 | seq { for i in 1..200 -> { mkItem () with HashKey = hKey; RangeKey = string i } } 199 | |> Seq.splitInto 25 200 | |> Seq.map 
table.BatchPutItemsAsync 201 | |> Async.Parallel 202 | |> Async.Ignore 203 | |> Async.RunSynchronously 204 | 205 | let results = table.ScanProjected(<@ fun r -> r.RangeKey @>, filterCondition = <@ fun r -> r.HashKey = hKey @>) 206 | test <@ set [ 1..200 ] = (results |> Seq.map int |> set) @> 207 | 208 | interface IClassFixture 209 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/SimpleTableOperationTests.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB.Tests 2 | 3 | open System 4 | 5 | open FSharp.AWS.DynamoDB.Tests 6 | open Swensen.Unquote 7 | open Xunit 8 | 9 | open FSharp.AWS.DynamoDB 10 | open FSharp.AWS.DynamoDB.Scripting // These tests lean on the Synchronous wrappers 11 | 12 | [] 13 | module SimpleTableTypes = 14 | 15 | type SimpleRecord = 16 | { [] 17 | HashKey: string 18 | [] 19 | RangeKey: string 20 | 21 | EmptyString: string 22 | 23 | Value: int64 24 | 25 | Tuple: int64 * int64 26 | 27 | Map: Map 28 | 29 | Unions: Choice list } 30 | 31 | [] 32 | type CompatibleRecord = 33 | { [] 34 | Id: string 35 | 36 | Values: Set } 37 | 38 | [] 39 | type CompatibleRecord2 = 40 | { [] 41 | Id: string 42 | 43 | Values: Set } 44 | 45 | type ``Simple Table Operation Tests``(fixture: TableFixture) = 46 | 47 | let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 48 | let mkItem () = 49 | { HashKey = guid () 50 | RangeKey = guid () 51 | EmptyString = "" 52 | Value = rand () 53 | Tuple = rand (), rand () 54 | Map = seq { for _ in 0L .. rand () % 5L -> "K" + guid (), rand () } |> Map.ofSeq 55 | Unions = [ Choice1Of3(guid ()); Choice2Of3(rand ()); Choice3Of3(Guid.NewGuid().ToByteArray()) ] } 56 | 57 | let table = fixture.CreateEmpty() 58 | 59 | [] 60 | let ``Convert to compatible table`` () = 61 | let table' = table.WithRecordType() 62 | test <@ table.PrimaryKey = table'.PrimaryKey @> 63 | 64 | [] 65 | let ``Convert to compatible table 2`` () = 66 | let table' = table.WithRecordType() 67 | test <@ table.PrimaryKey = table'.PrimaryKey @> 68 | 69 | [] 70 | let ``Simple Put Operation`` () = 71 | let value = mkItem () 72 | let key = table.PutItem value 73 | let value' = table.GetItem key 74 | test <@ value = value' @> 75 | 76 | [] 77 | let ``ContainsKey Operation`` () = 78 | let value = mkItem () 79 | let key = table.PutItem value 80 | test <@ table.ContainsKey key @> 81 | let _ = table.DeleteItem key 82 | test <@ not (table.ContainsKey key) @> 83 | 84 | [] 85 | let ``TryGet Operation`` () = 86 | let value = mkItem () 87 | let computedKey = table.Template.ExtractKey value 88 | let get k = table.TryGetItemAsync k |> Async.RunSynchronously 89 | let initialLoad = get computedKey 90 | test <@ None = initialLoad @> 91 | let key = table.PutItem value 92 | test <@ computedKey = key @> // "Local key computation should be same as Put result" 93 | let loaded = get computedKey 94 | test <@ Some value = loaded @> 95 | let _ = table.DeleteItem key 96 | test <@ None = get key @> 97 | 98 | [] 99 | let ``Batch Put Operation`` () = 100 | let values = set [ for _ in 1L .. 20L -> mkItem () ] 101 | let unprocessed = table.BatchPutItems values 102 | let values' = 103 | table.BatchGetItems(values |> Seq.map (fun r -> TableKey.Combined(r.HashKey, r.RangeKey))) 104 | |> Set.ofArray 105 | test <@ Array.isEmpty unprocessed @> 106 | test <@ values = values' @> 107 | 108 | [] 109 | let ``Batch Delete Operation`` () = 110 | let values = set [ for _ in 1L .. 
20L -> mkItem () ] 111 | table.BatchPutItems values |> ignore 112 | let unprocessed = table.BatchDeleteItems(values |> Seq.map (fun r -> TableKey.Combined(r.HashKey, r.RangeKey))) 113 | test <@ Array.isEmpty unprocessed @> 114 | let values' = table.BatchGetItems(values |> Seq.map (fun r -> TableKey.Combined(r.HashKey, r.RangeKey))) 115 | test <@ Array.isEmpty values' @> 116 | 117 | [] 118 | let ``Simple Delete Operation`` () = 119 | let item = mkItem () 120 | let key = table.PutItem item 121 | test <@ table.ContainsKey key @> 122 | let item' = table.DeleteItem key 123 | test <@ Some item = item' @> 124 | test <@ not (table.ContainsKey key) @> 125 | 126 | [] 127 | let ``Idempotent Delete Operation`` () = 128 | let item = mkItem () 129 | let key = table.PutItem item 130 | test <@ table.ContainsKey key @> 131 | let item' = table.DeleteItem key 132 | test <@ item' = Some item @> 133 | let deletedItem = table.DeleteItem key 134 | test <@ None = deletedItem @> 135 | test <@ not (table.ContainsKey key) @> 136 | 137 | [] 138 | [] 141 | [] 144 | [] 147 | let ``Operations with empty key values should fail with a DynamoDB client error`` (hashKey, rangeKey, expectedErrorMsg) = 148 | let value = { mkItem () with HashKey = hashKey; RangeKey = rangeKey } 149 | try 150 | table.PutItem value |> ignore 151 | with :? Amazon.DynamoDBv2.AmazonDynamoDBException as ex -> 152 | test <@ ex.Message = expectedErrorMsg @> 153 | 154 | interface IClassFixture 155 | 156 | type ``TransactWriteItems tests``(table1: TableFixture, table2: TableFixture) = 157 | 158 | let randInt64 = let r = Random.Shared in fun () -> int64 <| r.Next() 159 | let randInt = let r = Random.Shared in fun () -> int32 <| r.Next() 160 | let mkItem () = 161 | { HashKey = guid () 162 | RangeKey = guid () 163 | EmptyString = "" 164 | Value = randInt64 () 165 | Tuple = randInt64 (), randInt64 () 166 | Map = seq { for _ in 0L .. randInt64 () % 5L -> "K" + guid (), randInt64 () } |> Map.ofSeq 167 | Unions = [ Choice1Of3(guid ()); Choice2Of3(randInt64 ()); Choice3Of3(Guid.NewGuid().ToByteArray()) ] } 168 | 169 | let mkCompatibleItem () : CompatibleRecord = { Id = guid (); Values = set [ for _ in 0 .. (randInt () % 5) -> randInt () ] } 170 | 171 | let table1 = table1.CreateEmpty() 172 | let table2 = table2.CreateEmpty() 173 | let compileTable1 = table1.Template.PrecomputeConditionalExpr 174 | let compileTable2 = table2.Template.PrecomputeConditionalExpr 175 | let compileUpdateTable1 (e: Quotations.Expr SimpleRecord>) = table1.Template.PrecomputeUpdateExpr e 176 | let compileUpdateTable2 (e: Quotations.Expr SimpleRecord>) = table1.Template.PrecomputeUpdateExpr e 177 | let doesntExistConditionTable1 = compileTable1 <@ fun t -> NOT_EXISTS t.Value @> 178 | let doesntExistConditionTable2 = compileTable2 <@ fun t -> NOT_EXISTS t.Values @> 179 | let existsConditionTable1 = compileTable1 <@ fun t -> EXISTS t.Value @> 180 | let existsConditionTable2 = compileTable2 <@ fun t -> EXISTS t.Values @> 181 | 182 | [] 183 | let ``Minimal happy path`` () = async { 184 | let item = mkItem () 185 | let transaction = table1.CreateTransaction() 186 | transaction.Put(table1, item, doesntExistConditionTable1) 187 | do! transaction.TransactWriteItems() 188 | 189 | let! itemFound = table1.ContainsKeyAsync(table1.Template.ExtractKey item) 190 | true =! 
itemFound 191 | } 192 | 193 | [] 194 | let ``Minimal happy path with multiple tables`` () = async { 195 | let item = mkItem () 196 | let compatibleItem = mkCompatibleItem () 197 | 198 | let transaction = table1.CreateTransaction() 199 | transaction.Put(table1, item, doesntExistConditionTable1) 200 | transaction.Put(table2, compatibleItem, doesntExistConditionTable2) 201 | do! transaction.TransactWriteItems() 202 | 203 | let! itemFound = table1.ContainsKeyAsync(table1.Template.ExtractKey item) 204 | true =! itemFound 205 | 206 | let! compatibleItemFound = table2.ContainsKeyAsync(table2.Template.ExtractKey compatibleItem) 207 | true =! compatibleItemFound 208 | } 209 | 210 | [] 211 | let ``Minimal Canceled path`` () = async { 212 | let item = mkItem () 213 | 214 | let transaction = table1.CreateTransaction() 215 | transaction.Put(table1, item, existsConditionTable1) 216 | let mutable failed = false 217 | try 218 | do! transaction.TransactWriteItems() 219 | with Transaction.TransactionCanceledConditionalCheckFailed -> 220 | failed <- true 221 | 222 | true =! failed 223 | 224 | let! itemFound = table1.ContainsKeyAsync(table1.Template.ExtractKey item) 225 | false =! itemFound 226 | } 227 | 228 | [] 229 | let ``ConditionCheck outcome should affect sibling TransactWrite`` shouldFail = async { 230 | let item, item2 = mkItem (), mkItem () 231 | let! key = table1.PutItemAsync item 232 | 233 | let transaction = table1.CreateTransaction() 234 | if shouldFail then 235 | transaction.Check(table1, key, doesntExistConditionTable1) 236 | else 237 | transaction.Check(table1, key, existsConditionTable1) 238 | transaction.Put(table1, item2) 239 | 240 | let mutable failed = false 241 | try 242 | do! transaction.TransactWriteItems() 243 | with Transaction.TransactionCanceledConditionalCheckFailed -> 244 | failed <- true 245 | 246 | failed =! shouldFail 247 | 248 | let! item2Found = table1.ContainsKeyAsync(table1.Template.ExtractKey item2) 249 | failed =! not item2Found 250 | } 251 | 252 | [] 253 | let ``All paths`` shouldFail = async { 254 | let item, item2, item3, item4, item5, item6, item7 = mkItem (), mkItem (), mkItem (), mkItem (), mkItem (), mkItem (), mkItem () 255 | let! key = table1.PutItemAsync item 256 | let transaction = table1.CreateTransaction() 257 | 258 | let requests = 259 | [ transaction.Update(table1, key, compileUpdateTable1 <@ fun t -> { t with Value = 42 } @>, existsConditionTable1) 260 | transaction.Put(table1, item2) 261 | transaction.Put(table1, item3, doesntExistConditionTable1) 262 | transaction.Delete(table1, table1.Template.ExtractKey item4, doesntExistConditionTable1) 263 | transaction.Delete(table1, table1.Template.ExtractKey item5) 264 | transaction.Check( 265 | table1, 266 | table1.Template.ExtractKey item6, 267 | (if shouldFail then 268 | existsConditionTable1 269 | else 270 | doesntExistConditionTable1) 271 | ) 272 | transaction.Update( 273 | table1, 274 | TableKey.Combined(item7.HashKey, item7.RangeKey), 275 | compileUpdateTable1 <@ fun t -> { t with Tuple = (42, 42) } @> 276 | ) ] 277 | let mutable failed = false 278 | try 279 | do! transaction.TransactWriteItems() 280 | with Transaction.TransactionCanceledConditionalCheckFailed -> 281 | failed <- true 282 | failed =! shouldFail 283 | 284 | let! maybeItem = table1.TryGetItemAsync key 285 | test <@ shouldFail <> (maybeItem |> Option.contains { item with Value = 42 }) @> 286 | 287 | let! 
maybeItem2 = table1.TryGetItemAsync(table1.Template.ExtractKey item2) 288 | test <@ shouldFail <> (maybeItem2 |> Option.contains item2) @> 289 | 290 | let! maybeItem3 = table1.TryGetItemAsync(table1.Template.ExtractKey item3) 291 | test <@ shouldFail <> (maybeItem3 |> Option.contains item3) @> 292 | 293 | let! maybeItem7 = table1.TryGetItemAsync(table1.Template.ExtractKey item7) 294 | test <@ shouldFail <> (maybeItem7 |> Option.map (fun x -> x.Tuple) |> Option.contains (42, 42)) @> 295 | } 296 | 297 | let shouldBeRejectedWithArgumentOutOfRangeException (builder: Transaction) = async { 298 | let! e = Async.Catch(builder.TransactWriteItems()) 299 | test 300 | <@ 301 | match e with 302 | | Choice1Of2() -> false 303 | | Choice2Of2 e -> e :? ArgumentOutOfRangeException 304 | @> 305 | } 306 | 307 | [] 308 | let ``Empty request list is rejected with AORE`` () = 309 | shouldBeRejectedWithArgumentOutOfRangeException (Transaction(table1.Client)) 310 | |> Async.RunSynchronously 311 | 312 | [] 313 | let ``Over 100 writes are rejected with AORE`` () = 314 | let Transaction = Transaction(table1.Client) 315 | for _x in 1..101 do 316 | Transaction.Put(table1, mkItem ()) |> ignore 317 | 318 | shouldBeRejectedWithArgumentOutOfRangeException Transaction 319 | 320 | interface IClassFixture 321 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/SparseGSITests.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB.Tests 2 | 3 | open System 4 | 5 | open Swensen.Unquote 6 | open Xunit 7 | 8 | open FSharp.AWS.DynamoDB 9 | open FSharp.AWS.DynamoDB.Scripting 10 | 11 | [] 12 | module SparseGSITests = 13 | 14 | type GsiRecord = 15 | { [] 16 | HashKey: string 17 | [] 18 | RangeKey: string 19 | 20 | [] 21 | SecondaryHashKey: string option } 22 | 23 | type ``Sparse GSI Tests``(fixture: TableFixture) = 24 | 25 | let rand = let r = Random.Shared in fun () -> int64 <| r.Next() 26 | let mkItem () = 27 | { HashKey = guid () 28 | RangeKey = guid () 29 | SecondaryHashKey = if rand () % 2L = 0L then Some(guid ()) else None } 30 | 31 | let table = fixture.CreateEmpty() 32 | 33 | [] 34 | let ``GSI Put Operation`` () = 35 | let value = mkItem () 36 | let key = table.PutItem value 37 | let value' = table.GetItem key 38 | test <@ value = value' @> 39 | 40 | [] 41 | let ``GSI Query Operation (match)`` () = 42 | let value = { mkItem () with SecondaryHashKey = Some(guid ()) } 43 | let _key = table.PutItem value 44 | let res = table.Query(keyCondition = <@ fun (r: GsiRecord) -> r.SecondaryHashKey = value.SecondaryHashKey @>) 45 | test <@ 1 = Array.length res @> 46 | 47 | [] 48 | let ``GSI Query Operation (missing)`` () = 49 | let value = { mkItem () with SecondaryHashKey = Some(guid ()) } 50 | let key = table.PutItem value 51 | table.UpdateItem(key, <@ fun r -> { r with SecondaryHashKey = None } @>) |> ignore 52 | let res = table.Query(keyCondition = <@ fun (r: GsiRecord) -> r.SecondaryHashKey = value.SecondaryHashKey @>) 53 | test <@ Array.isEmpty res @> 54 | 55 | interface IClassFixture 56 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/Utils.fs: -------------------------------------------------------------------------------- 1 | namespace FSharp.AWS.DynamoDB.Tests 2 | 3 | open System 4 | open System.IO 5 | open System.Collections.Generic 6 | 7 | open FsCheck 8 | open Swensen.Unquote 9 | open Xunit 10 | 11 | open FSharp.AWS.DynamoDB 12 | 13 | 
open Amazon.DynamoDBv2 14 | open Amazon.DynamoDBv2.Model 15 | open Amazon.Runtime 16 | 17 | [<AutoOpen>] 18 | module Utils = 19 | 20 | let guid () = Guid.NewGuid().ToString("N") 21 | 22 | let getRandomTableName () = sprintf "fsdynamodb-%s" <| guid () 23 | 24 | let shouldFailwith<'T, 'Exn when 'Exn :> exn> (f: unit -> 'T) = <@ f () |> ignore @> |> raises<'Exn> 25 | 26 | let getDynamoDBAccount () = 27 | let credentials = BasicAWSCredentials("Fake", "Fake") 28 | let config = AmazonDynamoDBConfig(ServiceURL = "http://localhost:8000") 29 | 30 | new AmazonDynamoDBClient(credentials, config) :> IAmazonDynamoDB 31 | 32 | 33 | let clearAttribute (table: TableContext<'T>) (key: TableKey) (attribute: string) = 34 | let keyAttr = table.Template.ToAttributeValues key 35 | table.Client.UpdateItemAsync( 36 | new UpdateItemRequest( 37 | TableName = table.TableName, 38 | Key = keyAttr, 39 | AttributeUpdates = Dictionary(Map.ofSeq [ attribute, new AttributeValueUpdate(Action = AttributeAction.DELETE) ]) 40 | ) 41 | ) 42 | |> Async.AwaitTask 43 | |> Async.RunSynchronously 44 | |> ignore 45 | 46 | type FsCheckGenerators = 47 | static member MemoryStream = 48 | Arb.generate<byte[] option> 49 | |> Gen.map (function 50 | | None -> null 51 | | Some bs -> new MemoryStream(bs)) 52 | |> Arb.fromGen 53 | 54 | 55 | type TableFixture() = 56 | 57 | let client = getDynamoDBAccount () 58 | let tableName = getRandomTableName () 59 | 60 | member _.Client = client 61 | member _.TableName = tableName 62 | 63 | member _.CreateEmpty<'TRecord>() = 64 | let throughput = ProvisionedThroughput(readCapacityUnits = 10L, writeCapacityUnits = 10L) 65 | Scripting.TableContext.Initialize<'TRecord>(client, tableName, Throughput.Provisioned throughput) 66 | 67 | interface IAsyncLifetime with 68 | member _.InitializeAsync() = System.Threading.Tasks.Task.CompletedTask 69 | member _.DisposeAsync() = client.DeleteTableAsync(tableName) 70 | -------------------------------------------------------------------------------- /tests/FSharp.AWS.DynamoDB.Tests/paket.references: -------------------------------------------------------------------------------- 1 | group Test 2 | FsCheck 3 | Microsoft.NET.Test.Sdk 4 | xunit 5 | xunit.runner.visualstudio --------------------------------------------------------------------------------
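For orientation, the pattern the fixtures and tests above exercise can be condensed into a small end-to-end sketch. This is illustrative only and not part of the repository: the `Counter` record and the `fsdynamodb-sample` table name are hypothetical, and it assumes a DynamoDB Local instance at `http://localhost:8000` (as in `getDynamoDBAccount`) and the library's `HashKey`/`RangeKey` attributes for marking the record's key fields.

```fsharp
open Amazon.DynamoDBv2
open Amazon.Runtime
open FSharp.AWS.DynamoDB
open FSharp.AWS.DynamoDB.Scripting // synchronous wrappers, as used by the tests

// Hypothetical record type; HashKey/RangeKey mark the table's primary key fields
type Counter =
    { [<HashKey>] Id: string
      [<RangeKey>] Name: string
      Value: int64 }

// Same local-only client setup as getDynamoDBAccount in Utils.fs
let client: IAmazonDynamoDB =
    let credentials = BasicAWSCredentials("Fake", "Fake")
    let config = AmazonDynamoDBConfig(ServiceURL = "http://localhost:8000")
    new AmazonDynamoDBClient(credentials, config) :> IAmazonDynamoDB

// Create (or bind to) a table, as TableFixture.CreateEmpty does
let throughput = ProvisionedThroughput(readCapacityUnits = 10L, writeCapacityUnits = 10L)
let table = Scripting.TableContext.Initialize<Counter>(client, "fsdynamodb-sample", Throughput.Provisioned throughput)

// Wrap the context so each request reports its metrics, as the tests do with TestCollector
let metered = table.WithMetricsCollector(fun m -> printfn "%A" m)

let key = metered.PutItem { Id = "tenant-1"; Name = "visits"; Value = 0L }
let stored = metered.GetItem key
```

As in `BulkMutationTests` above, the metered handle is used alongside the raw one, so baseline setup traffic is kept out of the collected metrics.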