├── .cfconfig.json
├── .cfformat.json
├── .cflintrc
├── .editorconfig
├── .env.example
├── .gitattributes
├── .github
│   ├── FUNDING.YML
│   └── workflows
│       ├── ci.yml
│       ├── cron.yml
│       ├── gh-release.yml
│       ├── pr.yml
│       └── tests.yml
├── .gitignore
├── .markdownlint copy.json
├── .markdownlint.json
├── .vscode
│   └── settings.json
├── APACHE_LICENSE.TXT
├── CONTRIBUTING.md
├── ModuleConfig.cfc
├── box.json
├── build
│   ├── .travis.yml
│   ├── Build.cfc
│   └── release.boxr
├── changelog.md
├── models
│   ├── AmazonS3.cfc
│   ├── MiniLogBox.cfc
│   ├── Sv2Util.cfc
│   └── Sv4Util.cfc
├── readme.md
├── server-adobe@2018.json
├── server-adobe@2021.json
├── server-adobe@2023.json
├── server-boxlang-cfml@1.json
├── server-lucee@5.json
└── test-harness
    ├── .cflintrc
    ├── Application.cfc
    ├── box.json
    ├── config
    │   ├── Application.cfc
    │   ├── Coldbox.cfc
    │   ├── Router.cfc
    │   └── WireBox.cfc
    ├── handlers
    │   └── Main.cfc
    ├── index.cfm
    ├── layouts
    │   └── Main.cfm
    └── tests
        ├── Application.cfc
        ├── fixtures
        │   ├── get-presigned-url
        │   │   ├── get-presigned-url.authz
        │   │   ├── get-presigned-url.creq
        │   │   ├── get-presigned-url.req
        │   │   └── get-presigned-url.sts
        │   ├── get-vanilla-query-unreserved-s3
        │   │   ├── get-vanilla-query-unreserved-s3.authz
        │   │   ├── get-vanilla-query-unreserved-s3.creq
        │   │   ├── get-vanilla-query-unreserved-s3.req
        │   │   └── get-vanilla-query-unreserved-s3.sts
        │   ├── post-header-key-sort-s3
        │   │   ├── post-header-key-sort-s3.authz
        │   │   ├── post-header-key-sort-s3.creq
        │   │   ├── post-header-key-sort-s3.req
        │   │   └── post-header-key-sort-s3.sts
        │   └── post-vanilla-query-s3
        │       ├── post-vanilla-query-s3.authz
        │       ├── post-vanilla-query-s3.creq
        │       ├── post-vanilla-query-s3.req
        │       └── post-vanilla-query-s3.sts
        ├── index.cfm
        ├── runner.cfm
        ├── specs
        │   ├── AmazonS3Spec.cfc
        │   ├── Sv4UtilSpec.cfc
        │   └── models
        │       └── AmazonS3
        │           ├── buildKeyName.cfc
        │           ├── buildUrlEndpoint.cfc
        │           ├── createSignatureUtil.cfc
        │           ├── getBucketLocation.cfc
        │           ├── init.cfc
        │           ├── requireBucketName.cfc
        │           ├── setAuth.cfc
        │           ├── setAwsDomain.cfc
        │           ├── setAwsRegion.cfc
        │           └── setSSL.cfc
        └── tmp
            └── .gitkeep
/.cfconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "adminPassword" : "coldbox",
3 | "componentCacheEnabled":false,
4 | "postParametersLimit" : 200,
5 | "robustExceptionEnabled":true,
6 | "saveClassFiles":false,
7 | "systemErr":"System",
8 | "systemOut":"System",
9 | "thistimezone":"UTC",
10 | "whitespaceManagement":"white-space-pref",
11 | "debuggingEnabled":true,
12 | "debuggingReportExecutionTimes":false,
13 | "disableInternalCFJavaComponents":false,
14 | "inspectTemplate":"always",
15 | "requestTimeout":"0,0,0,90",
16 | "datasources":{
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/.cfformat.json:
--------------------------------------------------------------------------------
1 | {
2 | "array.empty_padding": false,
3 | "array.padding": true,
4 | "array.multiline.min_length": 50,
5 | "array.multiline.element_count": 2,
6 | "array.multiline.leading_comma.padding": true,
7 | "array.multiline.leading_comma": false,
8 | "alignment.consecutive.assignments": true,
9 | "alignment.consecutive.properties": true,
10 | "alignment.consecutive.params": true,
11 | "alignment.doc_comments" : true,
12 | "brackets.padding": true,
13 | "comment.asterisks": "align",
14 | "binary_operators.padding": true,
15 | "for_loop_semicolons.padding": true,
16 | "function_call.empty_padding": false,
17 | "function_call.padding": true,
18 | "function_call.multiline.leading_comma.padding": true,
19 | "function_call.casing.builtin": "cfdocs",
20 | "function_call.casing.userdefined": "camel",
21 | "function_call.multiline.element_count": 3,
22 | "function_call.multiline.leading_comma": false,
23 | "function_call.multiline.min_length": 50,
24 | "function_declaration.padding": true,
25 | "function_declaration.empty_padding": false,
26 | "function_declaration.multiline.leading_comma": false,
27 | "function_declaration.multiline.leading_comma.padding": true,
28 | "function_declaration.multiline.element_count": 3,
29 | "function_declaration.multiline.min_length": 50,
30 | "function_declaration.group_to_block_spacing": "compact",
31 | "function_anonymous.empty_padding": false,
32 | "function_anonymous.group_to_block_spacing": "compact",
33 | "function_anonymous.multiline.element_count": 3,
34 | "function_anonymous.multiline.leading_comma": false,
35 | "function_anonymous.multiline.leading_comma.padding": true,
36 | "function_anonymous.multiline.min_length": 50,
37 | "function_anonymous.padding": true,
38 | "indent_size": 4,
39 | "keywords.block_to_keyword_spacing": "spaced",
40 | "keywords.group_to_block_spacing": "spaced",
41 | "keywords.padding_inside_group": true,
42 | "keywords.spacing_to_block": "spaced",
43 | "keywords.spacing_to_group": true,
44 | "keywords.empty_group_spacing": false,
45 | "max_columns": 115,
46 | "metadata.multiline.element_count": 3,
47 | "metadata.multiline.min_length": 50,
48 | "method_call.chain.multiline" : 3,
49 | "newline":"\n",
50 | "property.multiline.element_count": 3,
51 | "property.multiline.min_length": 30,
52 | "parentheses.padding": true,
53 | "strings.quote": "double",
54 | "strings.attributes.quote": "double",
55 | "struct.separator": " : ",
56 | "struct.padding": true,
57 | "struct.empty_padding": false,
58 | "struct.multiline.leading_comma": false,
59 | "struct.multiline.leading_comma.padding": true,
60 | "struct.multiline.element_count": 2,
61 | "struct.multiline.min_length": 60,
62 | "tab_indent": true
63 | }
64 |
--------------------------------------------------------------------------------
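For orientation only (this snippet is not a file in the repository), CFML shaped by the settings above uses tab indentation, padded parentheses and brackets, double quotes, and the ` : ` struct separator; a minimal sketch with a made-up function name:

```cfml
// Illustrative sketch of code formatted under the .cfformat.json rules above
function describeBucket( required string name ){
	var info = {
		name    : arguments.name,
		region  : "us-east-1",
		objects : 0
	};
	return info;
}
```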
/.cflintrc:
--------------------------------------------------------------------------------
1 | {
2 | "rule": [],
3 | "includes": [
4 | { "code": "AVOID_USING_CFINCLUDE_TAG" },
5 | { "code": "AVOID_USING_CFABORT_TAG" },
6 | { "code": "AVOID_USING_CFEXECUTE_TAG" },
7 | { "code": "AVOID_USING_DEBUG_ATTR" },
8 | { "code": "AVOID_USING_ABORT" },
9 | { "code": "AVOID_USING_ISDATE" },
10 | { "code": "AVOID_USING_ISDEBUGMODE" },
11 | { "code": "AVOID_USING_CFINSERT_TAG" },
12 | { "code": "AVOID_USING_CFUPDATE_TAG" },
13 | { "code": "ARG_VAR_CONFLICT" },
14 | { "code": "ARG_VAR_MIXED" },
15 | { "code": "ARG_HINT_MISSING" },
16 | { "code": "ARG_HINT_MISSING_SCRIPT" },
17 | { "code" : "ARGUMENT_INVALID_NAME" },
18 | { "code" : "ARGUMENT_ALLCAPS_NAME" },
19 | { "code" : "ARGUMENT_TOO_WORDY" },
20 | { "code" : "ARGUMENT_IS_TEMPORARY" },
21 | { "code": "CFQUERYPARAM_REQ" },
22 | { "code": "COMPARE_INSTEAD_OF_ASSIGN" },
23 | { "code": "COMPONENT_HINT_MISSING" },
24 | { "code" : "COMPONENT_INVALID_NAME" },
25 | { "code" : "COMPONENT_ALLCAPS_NAME" },
26 | { "code" : "COMPONENT_TOO_SHORT" },
27 | { "code" : "COMPONENT_TOO_LONG" },
28 | { "code" : "COMPONENT_TOO_WORDY" },
29 | { "code" : "COMPONENT_IS_TEMPORARY" },
30 | { "code" : "COMPONENT_HAS_PREFIX_OR_POSTFIX" },
31 | { "code": "COMPLEX_BOOLEAN_CHECK" },
32 | { "code": "EXCESSIVE_FUNCTION_LENGTH" },
33 | { "code": "EXCESSIVE_COMPONENT_LENGTH" },
34 | { "code": "EXCESSIVE_ARGUMENTS" },
35 | { "code": "EXCESSIVE_FUNCTIONS" },
36 | { "code": "EXPLICIT_BOOLEAN_CHECK" },
37 | { "code": "FUNCTION_TOO_COMPLEX" },
38 | { "code": "FUNCTION_HINT_MISSING" },
39 | { "code": "FILE_SHOULD_START_WITH_LOWERCASE" },
40 | { "code": "LOCAL_LITERAL_VALUE_USED_TOO_OFTEN" },
41 | { "code": "GLOBAL_LITERAL_VALUE_USED_TOO_OFTEN" },
42 | { "code": "MISSING_VAR" },
43 | { "code" : "METHOD_INVALID_NAME" },
44 | { "code" : "METHOD_ALLCAPS_NAME" },
45 | { "code" : "METHOD_IS_TEMPORARY" },
46 | { "code": "NESTED_CFOUTPUT" },
47 | { "code": "NEVER_USE_QUERY_IN_CFM" },
48 | { "code": "OUTPUT_ATTR" },
49 | { "code" : "QUERYPARAM_REQ" },
50 | { "code": "UNUSED_LOCAL_VARIABLE" },
51 | { "code": "UNUSED_METHOD_ARGUMENT" },
52 | { "code": "SQL_SELECT_STAR" },
53 | { "code": "SCOPE_ALLCAPS_NAME" },
54 | { "code": "VAR_ALLCAPS_NAME" },
55 | { "code": "VAR_INVALID_NAME" },
56 | { "code": "VAR_TOO_WORDY" }
57 | ],
58 | "inheritParent": false,
59 | "parameters": {
60 | "TooManyFunctionsChecker.maximum" : 20
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
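As a quick illustration of a few of the rules enabled above (COMPONENT_HINT_MISSING, FUNCTION_HINT_MISSING, ARG_HINT_MISSING, MISSING_VAR), a hypothetical component that passes the linter looks like this; the component and method are invented for the example and are not part of the repository:

```cfml
component hint="Illustrative example only" {

	/**
	 * Sum an array of object sizes
	 *
	 * @sizes The numeric sizes to total
	 */
	function sumSizes( required array sizes ){
		// `var` scoping keeps the MISSING_VAR rule from firing
		var total = 0;
		arguments.sizes.each( function( item ){
			total += item;
		} );
		return total;
	}

}
```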
/.editorconfig:
--------------------------------------------------------------------------------
1 | # http://editorconfig.org
2 |
3 | root = true
4 |
5 | [*]
6 | end_of_line = lf
7 | charset = utf-8
8 | trim_trailing_whitespace = true
9 | insert_final_newline = false
10 | indent_style = tab
11 | indent_size = 4
12 | tab_width = 4
13 |
14 | [*.yml]
15 | indent_style = space
16 | indent_size = 2
17 |
18 | [*.{md,markdown}]
19 | trim_trailing_whitespace = false
20 | insert_final_newline = false
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | AWS_ACCESS_KEY=
2 | AWS_ACCESS_SECRET=
3 | AWS_REGION=us-east-1
4 | AWS_DOMAIN=amazonaws.com
5 | AWS_DEFAULT_BUCKET_NAME=
6 | # This env var will be used in a bucket name for the test suite. It's not
7 | # required for local development. In CI, the ENGINE var will be defined by
8 | # the CI setup, so that we can test on several engines simultaneously without
9 | # name collisions.
10 | #ENGINE=localhost
11 |
--------------------------------------------------------------------------------
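For context on how these variables are typically consumed, here is a minimal sketch (illustrative only; the actual harness wiring lives in the test suite, not in this file) of deriving a collision-free bucket name from the `ENGINE` variable:

```cfml
// Illustrative sketch: build a per-engine bucket name from the ENGINE env var
engine     = createObject( "java", "java.lang.System" ).getenv( "ENGINE" ) ?: "localhost";
bucketName = "s3sdk-test-" & reReplace( lCase( engine ), "[^a-z0-9\-]", "-", "all" );
```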
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
4 | # Custom for Visual Studio
5 | *.cs diff=csharp
6 | *.sln merge=union
7 | *.csproj merge=union
8 | *.vbproj merge=union
9 | *.fsproj merge=union
10 | *.dbproj merge=union
11 |
12 | # Standard to msysgit
13 | *.doc diff=astextplain
14 | *.DOC diff=astextplain
15 | *.docx diff=astextplain
16 | *.DOCX diff=astextplain
17 | *.dot diff=astextplain
18 | *.DOT diff=astextplain
19 | *.pdf diff=astextplain
20 | *.PDF diff=astextplain
21 | *.rtf diff=astextplain
22 | *.RTF diff=astextplain
23 |
--------------------------------------------------------------------------------
/.github/FUNDING.YML:
--------------------------------------------------------------------------------
1 | patreon: ortussolutions
2 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: AWS S3 SDK CI
2 |
3 | # Only on Development we build snapshots
4 | on:
5 | push:
6 | branches:
7 | - development
8 | - master
9 | workflow_dispatch:
10 |
11 | env:
12 | MODULE_ID: s3sdk
13 | jobs:
14 | #############################################
15 | # Tests First baby! We fail, no build :(
16 | #############################################
17 | tests:
18 | uses: ./.github/workflows/tests.yml
19 | secrets:
20 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
21 | S3SDK_AWS_ACCESS_KEY: ${{ secrets.S3SDK_AWS_ACCESS_KEY }}
22 | S3SDK_AWS_ACCESS_SECRET: ${{ secrets.S3SDK_AWS_ACCESS_SECRET }}
23 |
24 | #############################################
25 | # Build Module
26 | #############################################
27 | build:
28 | name: Build & Publish
29 | needs: tests
30 | runs-on: ubuntu-24.04
31 | steps:
32 | - name: Checkout Repository
33 | uses: actions/checkout@v2
34 | with:
35 | fetch-depth: 0
36 |
37 | - name: Setup Java
38 | uses: actions/setup-java@v2
39 | with:
40 | distribution: "adopt"
41 | java-version: "11"
42 |
43 | - name: Setup CommandBox
44 | uses: Ortus-Solutions/setup-commandbox@v2.0.1
45 | with:
46 | forgeboxAPIKey: ${{ secrets.FORGEBOX_TOKEN }}
47 |
48 | - name: Setup Environment Variables For Build Process
49 | id: current_version
50 | run: |
51 | echo "VERSION=`cat box.json | jq '.version' -r`" >> $GITHUB_ENV
52 | box package set version=@build.version@+@build.number@
53 | # master or snapshot
54 | echo "Github Ref is $GITHUB_REF"
55 | echo "BRANCH=master" >> $GITHUB_ENV
56 | if [ $GITHUB_REF == 'refs/heads/development' ]
57 | then
58 | echo "BRANCH=development" >> $GITHUB_ENV
59 | fi
60 |
61 | - name: Build ${{ env.MODULE_ID }}
62 | run: |
63 | box install commandbox-docbox
64 | box task run taskfile=build/Build target=run :version=${{ env.VERSION }} :projectName=${{ env.MODULE_ID }} :buildID=${{ github.run_number }} :branch=${{ env.BRANCH }}
65 |
66 | - name: Upload Build Artifacts
67 | if: success()
68 | uses: actions/upload-artifact@v4
69 | with:
70 | name: ${{ env.MODULE_ID }}
71 | path: |
72 | .artifacts/**/*
73 |
74 | - name: Upload Binaries to S3
75 | uses: jakejarvis/s3-sync-action@master
76 | with:
77 | args: --acl public-read
78 | env:
79 | AWS_S3_BUCKET: "downloads.ortussolutions.com"
80 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
81 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_SECRET }}
82 | SOURCE_DIR: ".artifacts/${{ env.MODULE_ID }}"
83 | DEST_DIR: "ortussolutions/coldbox-modules/${{ env.MODULE_ID }}"
84 |
85 | - name: Upload API Docs to S3
86 | uses: jakejarvis/s3-sync-action@master
87 | with:
88 | args: --acl public-read
89 | env:
90 | AWS_S3_BUCKET: "apidocs.ortussolutions.com"
91 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
92 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_SECRET }}
93 | SOURCE_DIR: ".tmp/apidocs"
94 | DEST_DIR: "coldbox-modules/${{ env.MODULE_ID }}/${{ env.VERSION }}"
95 |
96 | - name: Publish To ForgeBox
97 | run: |
98 | cd .tmp/${{ env.MODULE_ID }}
99 | cat box.json
100 | box forgebox publish
101 |
102 | - name: Inform Slack
103 | if: ${{ always() }}
104 | uses: rtCamp/action-slack-notify@v2
105 | env:
106 | SLACK_CHANNEL: coding
107 | SLACK_COLOR: ${{ job.status }} # or a specific color like 'green' or '#ff00ff'
108 | SLACK_ICON_EMOJI: ":bell:"
109 | SLACK_MESSAGE: '${{ env.MODULE_ID }} Built with ${{ job.status }}!'
110 | SLACK_TITLE: "${{ env.MODULE_ID }} Build"
111 | SLACK_USERNAME: CI
112 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
113 |
--------------------------------------------------------------------------------
/.github/workflows/cron.yml:
--------------------------------------------------------------------------------
1 | name: Daily Tests
2 |
3 | on:
4 | schedule:
5 | - cron: '0 0 * * *' # Runs at 00:00 UTC every day
6 |
7 | jobs:
8 | tests:
9 | uses: ./.github/workflows/tests.yml
10 | secrets:
11 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
12 | S3SDK_AWS_ACCESS_KEY: ${{ secrets.S3SDK_AWS_ACCESS_KEY }}
13 | S3SDK_AWS_ACCESS_SECRET: ${{ secrets.S3SDK_AWS_ACCESS_SECRET }}
14 |
--------------------------------------------------------------------------------
/.github/workflows/gh-release.yml:
--------------------------------------------------------------------------------
1 | # Publish Github Release
2 | name: Github Release
3 |
4 | on:
5 | push:
6 | tags:
7 | - v[0-9]+.*
8 |
9 | jobs:
10 | create-release:
11 | runs-on: ubuntu-24.04
12 | steps:
13 | - uses: actions/checkout@v2
14 | - uses: taiki-e/create-gh-release-action@v1.5.0
15 | with:
16 | # Produced by the build/Build.cfc
17 | changelog: changelog.md
18 | env:
19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
20 |
--------------------------------------------------------------------------------
/.github/workflows/pr.yml:
--------------------------------------------------------------------------------
1 | name: Pull Requests
2 |
3 | on:
4 | pull_request:
5 | branches:
6 | - development
7 |
8 | jobs:
9 | tests:
10 | uses: ./.github/workflows/tests.yml
11 | secrets:
12 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
13 | S3SDK_AWS_ACCESS_KEY: ${{ secrets.S3SDK_AWS_ACCESS_KEY }}
14 | S3SDK_AWS_ACCESS_SECRET: ${{ secrets.S3SDK_AWS_ACCESS_SECRET }}
15 |
16 | # Format PR
17 | format:
18 | name: Format
19 | runs-on: ubuntu-24.04
20 | steps:
21 | - name: Checkout Repository
22 | uses: actions/checkout@v2
23 |
24 | - uses: Ortus-Solutions/commandbox-action@v1.0.2
25 | with:
26 | cmd: run-script format
27 |
28 | - name: Commit Format Changes
29 | uses: stefanzweifel/git-auto-commit-action@v4
30 | with:
31 | commit_message: Apply cfformat changes
32 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Test Suites
2 |
3 | # We are a reusable Workflow only
4 | on:
5 | workflow_call:
6 | secrets:
7 | SLACK_WEBHOOK_URL:
8 | required: false
9 | S3SDK_AWS_ACCESS_KEY:
10 | required: true
11 | S3SDK_AWS_ACCESS_SECRET:
12 | required: true
13 |
14 | jobs:
15 | tests:
16 | name: Tests
17 | runs-on: ubuntu-24.04
18 | env:
19 | DB_USER: root
20 | DB_PASSWORD: root
21 | AWS_DOMAIN: amazonaws.com
22 | AWS_REGION: us-east-1
23 | continue-on-error: ${{ matrix.experimental }}
24 | strategy:
25 | fail-fast: false
26 | matrix:
27 | cfengine: [ "lucee@5", "adobe@2021", "boxlang-cfml@1" ]
28 | coldboxVersion: [ "^7" ]
29 | experimental: [ false ]
30 | include:
31 | - cfengine: "adobe@2023"
32 | coldboxVersion: "^7"
33 | experimental: true
34 | - cfengine: "adobe@2018"
35 | coldboxVersion: "^7"
36 | experimental: true
37 | - coldboxVersion: "be"
38 | cfengine: "lucee@5"
39 | experimental: true
40 | - coldboxVersion: "be"
41 | cfengine: "adobe@2018"
42 | experimental: true
43 | - coldboxVersion: "be"
44 | cfengine: "adobe@2021"
45 | experimental: true
46 | - coldboxVersion: "be"
47 | cfengine: "boxlang-cfml@1"
48 | experimental: true
49 | steps:
50 | - name: Checkout Repository
51 | uses: actions/checkout@v2
52 |
53 | - name: Setup Java
54 | uses: actions/setup-java@v2
55 | with:
56 | distribution: "adopt"
57 | java-version: "11"
58 |
59 | # - name: Setup Database and Fixtures
60 | # run: |
61 | # #sudo systemctl start mysql.service
62 | # ## Create Database
63 | # #mysql -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }} -e 'CREATE DATABASE cbsecurity;'
64 | # ## Import Database
65 | # #mysql -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }} < test-harness/tests/resources/cbsecurity.sql
66 |
67 | - name: Setup Environment For Testing Process
68 | run: |
69 | # Setup .env
70 | touch .env
71 | # ENV
72 | printf "ENVIRONMENT=development\n" >> .env
73 | printf "AWS_ACCESS_KEY=${{ secrets.S3SDK_AWS_ACCESS_KEY }}\n" >> .env
74 | printf "AWS_ACCESS_SECRET=${{ secrets.S3SDK_AWS_ACCESS_SECRET }}\n" >> .env
75 | printf "AWS_REGION=${{ env.AWS_REGION }}\n" >> .env
76 | printf "AWS_DOMAIN=${{ env.AWS_DOMAIN }}\n" >> .env
77 | printf "ENGINE=${{ matrix.cfengine }}\n" >> .env
78 | printf "COLDBOX_VERSION=${{ matrix.coldboxVersion }}\n" >> .env
79 |
80 |
81 | - name: Setup CommandBox CLI
82 | uses: Ortus-Solutions/setup-commandbox@v2.0.1
83 | with:
84 | version: 6.1.0
85 |
86 | # This needs to happen until v6.2 of commandbox is released
87 | - name: Update CommandBox-BoxLang
88 | if : ${{ matrix.cfengine == 'boxlang-cfml@1' }}
89 | run: |
90 | box install --force commandbox-boxlang
91 |
92 | - name: Install Test Harness with ColdBox ${{ matrix.coldboxVersion }}
93 | run: |
94 | box install
95 | cd test-harness
96 | box package set dependencies.coldbox=${{ matrix.coldboxVersion }}
97 | box install
98 |
99 | - name: Start ${{ matrix.cfengine }} Server
100 | run: |
101 | box server start serverConfigFile="server-${{ matrix.cfengine }}.json" --noSaveSettings --debug
102 | curl http://127.0.0.1:60299
103 |
104 | - name: Run Tests
105 | run: |
106 | mkdir -p test-harness/tests/results
107 | box testbox run --verbose outputFile=test-harness/tests/results/test-results outputFormats=json,antjunit
108 | ls -lR test-harness/tests
109 |
110 | - name: Publish Test Results
111 | uses: EnricoMi/publish-unit-test-result-action@v2
112 | if: always()
113 | with:
114 | files: test-harness/tests/results/**/*.xml
115 | check_name: "${{ matrix.cfengine }} Test Results - Coldbox ${{matrix.coldboxVersion}}"
116 |
117 | - name: Upload Test Results to Artifacts
118 | if: always()
119 | uses: actions/upload-artifact@v4
120 | with:
121 | name: ${{ matrix.cfengine }}-test-results-${{matrix.coldboxVersion}}-${{ matrix.experimental }}
122 | path: |
123 | test-harness/tests/results/**/*
124 |
125 | - name: Failure Debugging Log
126 | if: ${{ failure() }}
127 | run: |
128 | box server log serverConfigFile="server-${{ matrix.cfengine }}.json"
129 |
130 | - name: Upload Debugging Log To Artifacts
131 | if: ${{ failure() }}
132 | uses: actions/upload-artifact@v4
133 | with:
134 | name: Failure Debugging Info - ${{ matrix.cfengine }}
135 | path: |
136 | .engine/**/logs/*
137 | .engine/**/WEB-INF/cfusion/logs/*
138 |
139 | - name: Slack Notifications
140 | # Only on failures and NOT in pull requests
141 | if: ${{ failure() && !startsWith( 'pull_request', github.event_name ) }}
142 | uses: rtCamp/action-slack-notify@v2
143 | env:
144 | SLACK_CHANNEL: coding
145 | SLACK_COLOR: ${{ job.status }} # or a specific color like 'green' or '#ff00ff'
146 | SLACK_ICON_EMOJI: ":bell:"
147 | SLACK_MESSAGE: '${{ github.repository }} tests failed :cry:'
148 | SLACK_TITLE: ${{ github.repository }} Tests For ${{ matrix.cfengine }} failed
149 | SLACK_USERNAME: CI
150 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
151 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Artifacts and temp folders
2 | .artifacts/**
3 | .tmp/**
4 |
5 | # Engine + Secrets + databases
6 | .env
7 | .engine/**
8 | test-harness/.engine
9 | .db/**
10 |
11 | # Dependencies
12 | test-harness/coldbox/**
13 | test-harness/docbox/**
14 | test-harness/testbox/**
15 | test-harness/logs/**
16 | test-harness/modules/**
17 | test-harness/tests/tmp/**
18 | !test-harness/tests/tmp/.gitkeep
19 |
20 | # modules
21 | modules/**
22 |
23 | # log files
24 | logs/**
25 | .idea/
26 |
--------------------------------------------------------------------------------
/.markdownlint copy.json:
--------------------------------------------------------------------------------
1 | {
2 | "line-length": false,
3 | "single-h1": false,
4 | "no-hard-tabs" : false,
5 | "fenced-code-language" : false,
6 | "no-bare-urls" : false,
7 | "first-line-h1": false,
8 | "no-multiple-blanks": {
9 | "maximum": 2
10 | },
11 | "no-duplicate-header" : {
12 | "siblings_only" : true
13 | },
14 | "no-duplicate-heading" : false,
15 | "no-inline-html" : false
16 | }
17 |
--------------------------------------------------------------------------------
/.markdownlint.json:
--------------------------------------------------------------------------------
1 | {
2 | "line-length": false,
3 | "single-h1": false,
4 | "no-hard-tabs" : false,
5 | "fenced-code-language" : false,
6 | "no-bare-urls" : false,
7 | "first-line-h1": false,
8 | "no-multiple-blanks": {
9 | "maximum": 2
10 | },
11 | "no-duplicate-header" : {
12 | "siblings_only" : true
13 | },
14 | "no-inline-html" : false
15 | }
16 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "cfml.mappings": [
3 | {
4 | "logicalPath": "/coldbox",
5 | "directoryPath": "./test-harness/coldbox",
6 | "isPhysicalDirectoryPath": false
7 | },
8 | {
9 | "logicalPath": "/testbox",
10 | "directoryPath": "./test-harness/testbox",
11 | "isPhysicalDirectoryPath": false
12 | }
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/APACHE_LICENSE.TXT:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
10 |
11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
12 |
13 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
14 |
15 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
16 |
17 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
18 |
19 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
20 |
21 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
22 |
23 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
24 |
25 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
26 |
27 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
28 |
29 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
30 |
31 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
32 |
33 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
34 |
35 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and
36 |
37 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and
38 |
39 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
40 |
41 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
42 |
43 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
44 |
45 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
46 |
47 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
48 |
49 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
50 |
51 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
52 |
53 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
54 |
55 | END OF TERMS AND CONDITIONS
56 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Setting up your development environment
2 |
3 | - Clone the repository
4 | - `cd test-harness`
5 | - `cp .env.example .env`
6 | - Edit .env file appropriately
7 | - `box install`
8 | - `box run-script start`
9 |   This will use the Lucee 5 server config file by default. See `start:[version]`
10 |   options in test-harness/box.json for other engines.
11 | - Run tests:
12 | - In browser, visit `http://localhost:60299/tests/runner.cfm`
13 | - On command line: `box testbox run`
14 |
--------------------------------------------------------------------------------
/ModuleConfig.cfc:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright Ortus Solutions, Corp
3 | * www.ortussolutions.com
4 | * ---
5 | * This module connects your application to Amazon S3
6 | **/
7 | component {
8 |
9 | // Module Properties
10 | this.title = "Amazon S3 SDK";
11 | this.author = "Ortus Solutions, Corp";
12 | this.webURL = "https://www.ortussolutions.com";
13 | this.description = "This SDK will provide you with Amazon S3 connectivity for any ColdFusion (CFML) application.";
14 |
15 | // Module Entry Point
16 | this.entryPoint = "s3sdk";
17 | // Model Namespace
18 | this.modelNamespace = "s3sdk";
19 | // CF Mapping
20 | this.cfmapping = "s3sdk";
21 | // Auto-map models
22 | this.autoMapModels = false;
23 |
24 | /**
25 | * Configure
26 | */
27 | function configure(){
28 | // Settings
29 | variables.settings = {
30 | accessKey : "",
31 | autoContentType : false,
32 | autoMD5 : false,
33 | awsDomain : "amazonaws.com",
34 | awsRegion : "us-east-1",
35 | debug : false,
36 | defaultACL : "public-read",
37 | defaultBucketName : "",
38 | defaultCacheControl : "no-store, no-cache, must-revalidate",
39 | defaultDelimiter : "/",
40 | defaultStorageClass : "STANDARD",
41 | defaultTimeOut : 300,
42 | encryptionCharset : "utf-8",
43 | retriesOnError : 3,
44 | secretKey : "",
45 | serviceName : "s3",
46 | signatureType : "V4",
47 | ssl : true,
48 | throwOnRequestError : true,
49 | defaultEncryptionAlgorithm : "",
50 | defaultEncryptionKey : "",
51 | defaultObjectOwnership : "ObjectWriter",
52 | defaultBlockPublicAcls : false,
53 | defaultIgnorePublicAcls : false,
54 | defaultBlockPublicPolicy : false,
55 | defaultRestrictPublicBuckets : false,
56 | urlStyle : "path"
57 | };
58 | }
59 |
60 | /**
61 | * Fired when the module is registered and activated.
62 | */
63 | function onLoad(){
64 | binder
65 | .map( "AmazonS3@s3sdk" )
66 | .to( "#moduleMapping#.models.AmazonS3" )
67 | .initArg( name = "accessKey", value = variables.settings.accessKey )
68 | .initArg( name = "secretKey", value = variables.settings.secretKey )
69 | .initArg( name = "awsDomain", value = variables.settings.awsDomain )
70 | .initArg( name = "awsRegion", value = variables.settings.awsregion )
71 | .initArg( name = "encryptionCharset", value = variables.settings.encryptionCharset )
72 | .initArg( name = "signatureType", value = variables.settings.signatureType )
73 | .initArg( name = "ssl", value = variables.settings.ssl )
74 | .initArg( name = "defaultTimeOut", value = variables.settings.defaultTimeOut )
75 | .initArg( name = "defaultDelimiter", value = variables.settings.defaultDelimiter )
76 | .initArg( name = "defaultBucketName", value = variables.settings.defaultBucketName )
77 | .initArg( name = "defaultCacheControl", value = variables.settings.defaultCacheControl )
78 | .initArg( name = "defaultStorageClass", value = variables.settings.defaultStorageClass )
79 | .initArg( name = "defaultACL", value = variables.settings.defaultACL )
80 | .initArg( name = "throwOnRequestError", value = variables.settings.throwOnRequestError )
81 | .initArg( name = "autoContentType", value = variables.settings.autoContentType )
82 | .initArg( name = "autoMD5", value = variables.settings.autoMD5 )
83 | .initArg( name = "serviceName", value = variables.settings.serviceName )
84 | .initArg( name = "debug", value = variables.settings.debug )
85 | .initArg( name = "defaultEncryptionAlgorithm", value = variables.settings.defaultEncryptionAlgorithm )
86 | .initArg( name = "defaultEncryptionKey", value = variables.settings.defaultEncryptionKey )
87 | .initArg( name = "defaultObjectOwnership", value = variables.settings.defaultObjectOwnership )
88 | .initArg( name = "defaultBlockPublicAcls", value = variables.settings.defaultBlockPublicAcls )
89 | .initArg( name = "defaultIgnorePublicAcls", value = variables.settings.defaultIgnorePublicAcls )
90 | .initArg( name = "defaultBlockPublicPolicy", value = variables.settings.defaultBlockPublicPolicy )
91 | .initArg(
92 | name = "defaultRestrictPublicBuckets",
93 | value = variables.settings.defaultRestrictPublicBuckets
94 | ).initArg(
95 | name = "urlStyle",
96 | value = variables.settings.urlStyle
97 | );
98 | binder.map( "Sv4Util@s3sdk" ).to( "#moduleMapping#.models.Sv4Util" );
99 |
100 | binder.map( "Sv2Util@s3sdk" ).to( "#moduleMapping#.models.Sv2Util" );
101 | }
102 |
103 |
104 |
105 |
106 | /**
107 | * Fired when the module is unregistered and unloaded
108 | */
109 | function onUnload(){
110 | }
111 |
112 | }
113 |
--------------------------------------------------------------------------------
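For consumers of the module, the settings struct defined above can be overridden from the host application's configuration, and the bound model injected wherever WireBox is available. The snippet below is a minimal sketch using the standard ColdBox `moduleSettings` convention; the bucket name, the environment variable lookups, and the injection target are illustrative, not part of this repository:

```cfml
// config/ColdBox.cfc of a consuming application, illustrative sketch only
function configure(){
	moduleSettings = {
		s3sdk : {
			accessKey         : getSystemSetting( "AWS_ACCESS_KEY", "" ),
			secretKey         : getSystemSetting( "AWS_ACCESS_SECRET", "" ),
			awsRegion         : "us-east-1",
			defaultBucketName : "my-example-bucket"
		}
	};
}
```

The SDK can then be injected where needed, e.g. `property name="s3" inject="AmazonS3@s3sdk";`.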
/box.json:
--------------------------------------------------------------------------------
1 | {
2 | "name":"Amazon S3 SDK",
3 | "version":"5.8.1",
4 | "slug":"s3sdk",
5 | "location":"https://downloads.ortussolutions.com/ortussolutions/coldbox-modules/s3sdk/@build.version@/s3sdk-@build.version@.zip",
6 | "author":"Ortus Solutions, Corp",
7 | "homepage":"https://github.com/coldbox-modules/s3sdk",
8 | "documentation":"https://github.com/coldbox-modules/s3sdk",
9 | "repository":{
10 | "type":"git",
11 | "URL":"https://github.com/coldbox-modules/s3sdk"
12 | },
13 | "bugs":"https://github.com/coldbox-modules/s3sdk/issues",
14 | "shortDescription":"This SDK will provide you with Amazon S3, Digital Ocean Spaces connectivity for any ColdFusion (CFML) application.",
15 | "type":"modules",
16 | "license":[
17 | {
18 | "type":"Apache2",
19 | "URL":"https://www.apache.org/licenses/LICENSE-2.0"
20 | }
21 | ],
22 | "contributors":[
23 | "Andrew Davis"
24 | ],
25 | "dependencies":{},
26 | "devDependencies":{
27 | "commandbox-cfformat":"*",
28 | "commandbox-docbox":"*",
29 | "commandbox-dotenv":"*",
30 | "commandbox-cfconfig":"*"
31 | },
32 | "installPaths":{},
33 | "ignore":[
34 | "**/.*",
35 | "build",
36 | "test-harness",
37 | "/server*.json"
38 | ],
39 | "testbox":{
40 | "runner":"http://localhost:60299/tests/runner.cfm"
41 | },
42 | "scripts":{
43 | "build:module":"task run taskFile=build/Build.cfc :projectName=`package show slug` :version=`package show version`",
44 | "build:docs":"task run taskFile=build/Build.cfc target=docs :projectName=`package show slug` :version=`package show version`",
45 | "release":"recipe build/release.boxr",
46 | "format":"cfformat run models,test-harness/tests/**/*.cfc,*.cfc --overwrite",
47 | "format:watch":"cfformat watch models,test-harness/tests/**/*.cfc,*.cfc ./.cfformat.json",
48 | "format:check":"cfformat check models,test-harness/tests/**/*.cfc,*.cfc",
49 | "install:dependencies":"install --force && cd test-harness && install --force"
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/build/.travis.yml:
--------------------------------------------------------------------------------
1 | language: java
2 |
3 | notifications:
4 | slack:
5 | secure: FIHlTn/YO7Wgumm1uIqmoEsqjQA7fV0AE94Rjc5yKzM3AquQa8HicgDVVk0d2GrKRnl0xt3j4ZJV//VJyIjlCd/QVKuj48R2ChjEY2im3+99HFPafCUI5/S2uyowKU6mJTFonH9v6p41eqxdbiAxJdDGOT0V2Gpt3UBSNuHz8ED9/aIHqv+P7M+VD6Xd2XYwctPniWlaSWx57sWcnG/VkFG45qFQAyha64uxOOe4M3ZmG/n5FfauZ8cBVLiRKEIr+CyNhh1ujfzi7+4uzMlSNL5t/BbZamAQuZzqGzGQ9RVvIlyPgUGNJtDEE/hWS09aagXF5T6EMj00szizErh4J1/x4qZwml5+TcBN31E0QmAhCtZe85sr3tYgic+hEz9XX1yymQzf/C7n4to2yNvq0r4g51xDk8IuP95WEh7zaqLlvFZvBFgxpHZBMYlRvhytjOYDeIFRMcGwHZcXosaG2ejqDwcGq/LC4oeG4sSwmg9sdRrtcmcanrNqrBka86WYO6LntI3JdZ86/1ACEUHzhCCwvrKELc9Ji1xxGAgS7QKH+s2/hnJuiMyv73gOVLKYC+wPMLt+fvOmPLSEl+PJiAIlToBq1KUBg03RSQLfPOLD7OrJ8VvDZsEPwejqlGDyc4wRglS9OTi7SnN5LYHSDNDdGdREegWqq9qDHEYEVLI=
6 |
7 | env:
8 | # Fill out these global variables for build process
9 | global:
10 | - MODULE_ID=s3sdk
11 | matrix:
12 | - ENGINE=lucee@5
13 | - ENGINE=adobe@2016
14 | - ENGINE=adobe@2018
15 | - ENGINE=adobe@2021
16 |
17 | branches:
18 | only:
19 | - development
20 | - master
21 |
22 | dist: focal
23 | sudo: required
24 |
25 | before_install:
26 | # CommandBox Keys
27 | - curl -fsSl https://downloads.ortussolutions.com/debs/gpg | sudo apt-key add -
28 | - sudo echo "deb https://downloads.ortussolutions.com/debs/noarch /" | sudo tee -a
29 | /etc/apt/sources.list.d/commandbox.list
30 |
31 | install:
32 | # Install Commandbox
33 | - sudo apt-get update && sudo apt-get --assume-yes install jq commandbox
34 | # Install CommandBox Supporting Libraries
35 | - box install commandbox-cfconfig,commandbox-dotenv,commandbox-docbox
36 | # If using auto-publish, you will need to provide your API token with this line:
37 | - box config set endpoints.forgebox.APIToken=$FORGEBOX_API_TOKEN > /dev/null
38 |
39 | script:
40 | # Set Current Version
41 | - TARGET_VERSION=`cat $TRAVIS_BUILD_DIR/box.json | jq '.version' -r`
42 | - TRAVIS_TAG=${TARGET_VERSION}
43 | - echo "Starting build for ${MODULE_ID} v${TARGET_VERSION}"
44 | # Replace version so builder can issue it
45 | - box package set version=@build.version@+@build.number@
46 | # Startup the harness
47 | - cd test-harness
48 | # Seed our env
49 | - touch .env
50 | - printf "AWS_ACCESS_KEY=${AWS_ACCESS_KEY}\n" >> .env
51 | - printf "AWS_ACCESS_SECRET=${AWS_ACCESS_SECRET}\n" >> .env
52 | - printf "AWS_REGION=${AWS_REGION}\n" >> .env
53 | - printf "AWS_DOMAIN=${AWS_DOMAIN}\n" >> .env
54 | - printf "ENGINE=${ENGINE}\n" >> .env
55 | # run our dependency install to ensure the workbench is in place
56 | - box install
57 | # run our matrix server
58 | - box server start serverConfigFile="server-${ENGINE}.json"
59 | # Startup the app
60 | - curl http://localhost:60299
61 | # Debugging of tests
62 | #- curl http://localhost:60299/tests/runner.cfm?reporter=json -o testresults.json && cat testresults.json
63 | # move back to build dir to build it
64 | - cd $TRAVIS_BUILD_DIR
65 | # Build Project
66 | - box task run taskfile=build/Build target=run :version=${TARGET_VERSION} :projectName=${MODULE_ID} :buildID=${TRAVIS_BUILD_NUMBER} :branch=${TRAVIS_BRANCH}
67 | # Cat results for debugging
68 | #- cat build/results.json
69 |
70 | after_failure:
71 | - cd $TRAVIS_BUILD_DIR/test-harness
72 | # Display the contents of our root directory
73 | # Spit out our Commandbox log in case we need to debug
74 | - box server log server-${ENGINE}.json
75 | - cat `box system-log`
76 |
77 | deploy:
78 | # Module Deployment
79 | - provider: s3
80 | on:
81 | branch:
82 | - master
83 | - development
84 | condition: "$ENGINE = lucee@5"
85 | skip_cleanup: true
86 | #AWS Credentials need to be set in Travis
87 | access_key_id: $AWS_ACCESS_KEY
88 | secret_access_key: $AWS_ACCESS_SECRET
89 | # Destination
90 | bucket: "downloads.ortussolutions.com"
91 | local-dir: $TRAVIS_BUILD_DIR/.artifacts/$MODULE_ID
92 | upload-dir: ortussolutions/coldbox-modules/$MODULE_ID
93 | acl: public_read
94 |
95 | # API Docs Deployment
96 | - provider: s3
97 | on:
98 | branch:
99 | - master
100 | - development
101 | condition: "$ENGINE = lucee@5"
102 | skip_cleanup: true
103 | #AWS Credentials need to be set in Travis
104 | access_key_id: $AWS_ACCESS_KEY
105 | secret_access_key: $AWS_ACCESS_SECRET
106 | bucket: "apidocs.ortussolutions.com"
107 | local-dir: $TRAVIS_BUILD_DIR/.tmp/apidocs
108 | upload-dir: coldbox-modules/$MODULE_ID/$TARGET_VERSION
109 | acl: public_read
110 |
111 | # Github Release only on Master
112 | - provider: releases
113 | api_key: ${GITHUB_TOKEN}
114 | on:
115 | branch:
116 | - master
117 | condition: "$ENGINE = lucee@5"
118 | skip_cleanup: true
119 | edge: true
120 | file_glob: true
121 | file: $TRAVIS_BUILD_DIR/.artifacts/$MODULE_ID/**/*
122 | release_notes_file: $TRAVIS_BUILD_DIR/changelog-latest.md
123 | name: v${TRAVIS_TAG}
124 | tag_name: v${TRAVIS_TAG}
125 | overwrite: true
126 |
127 | after_deploy:
128 | # Move to build out artifact
129 | - cd ${TRAVIS_BUILD_DIR}/.tmp/${MODULE_ID}
130 | - cat box.json
131 | # Only publish once
132 | - if [ ${ENGINE} = 'lucee@5' ]; then box forgebox publish; fi
133 |
--------------------------------------------------------------------------------
/build/Build.cfc:
--------------------------------------------------------------------------------
1 | /**
2 | * Build process for ColdBox Modules
3 | * Adapt to your needs.
4 | */
5 | component {
6 |
7 | /**
8 | * Constructor
9 | */
10 | function init(){
11 | // Setup Pathing
12 | variables.cwd = getCWD().reReplace( "\.$", "" );
13 | variables.artifactsDir = cwd & "/.artifacts";
14 | variables.buildDir = cwd & "/.tmp";
15 | variables.apiDocsURL = "http://localhost:60299/apidocs/";
16 | variables.testRunner = "http://localhost:60299/tests/runner.cfm";
17 |
18 | // Source Excludes Not Added to final binary
19 | variables.excludes = [
20 | "build",
21 | "node-modules",
22 | "resources",
23 | "test-harness",
24 | "(package|package-lock).json",
25 | "webpack.config.js",
26 | "server-.*\.json",
27 | "docker-compose.yml",
28 | "^\..*"
29 | ];
30 |
31 | // Cleanup + Init Build Directories
32 | [
33 | variables.buildDir,
34 | variables.artifactsDir
35 | ].each( function( item ){
36 | if ( directoryExists( item ) ) {
37 | directoryDelete( item, true );
38 | }
39 | // Create directories
40 | directoryCreate( item, true, true );
41 | } );
42 |
43 | // Create Mappings
44 | fileSystemUtil.createMapping(
45 | "coldbox",
46 | variables.cwd & "test-harness/coldbox"
47 | );
48 |
49 | return this;
50 | }
51 |
52 | /**
53 | * Run the build process: test, build source, docs, checksums
54 | *
55 | * @projectName The project name used for resources and slugs
56 | * @version The version you are building
57 | * @buildID The build identifier
58 | * @branch The branch you are building
59 | */
60 | function run(
61 | required projectName,
62 | version = "1.0.0",
63 | buildID = createUUID(),
64 | branch = "development"
65 | ){
66 | // Create project mapping
67 | fileSystemUtil.createMapping( arguments.projectName, variables.cwd );
68 |
69 | // Build the source
70 | buildSource( argumentCollection = arguments );
71 |
72 | // Build Docs
73 | arguments.outputDir = variables.buildDir & "/apidocs";
74 | docs( argumentCollection = arguments );
75 |
76 | // checksums
77 | buildChecksums();
78 |
79 | // Build latest changelog
80 | latestChangelog();
81 |
82 | // Finalize Message
83 | print
84 | .line()
85 | .boldMagentaLine( "Build Process is done! Enjoy your build!" )
86 | .toConsole();
87 | }
88 |
89 | /**
90 | * Run the test suites
91 | */
92 | function runTests(){
93 | // Tests First, if they fail then exit
94 | print.blueLine( "Testing the package, please wait..." ).toConsole();
95 |
96 | command( "testbox run" )
97 | .params(
98 | runner = variables.testRunner,
99 | verbose = true,
100 | outputFile = "#variables.cwd#/test-harness/results/test-results",
101 | outputFormats="json,antjunit"
102 | )
103 | .run();
104 |
105 | // Check Exit Code?
106 | if ( shell.getExitCode() ) {
107 | return error( "Cannot continue building, tests failed!" );
108 | }
109 | }
110 |
111 | /**
112 | * Build the source
113 | *
114 | * @projectName The project name used for resources and slugs
115 | * @version The version you are building
116 | * @buildID The build identifier
117 | * @branch The branch you are building
118 | */
119 | function buildSource(
120 | required projectName,
121 | version = "1.0.0",
122 | buildID = createUUID(),
123 | branch = "development"
124 | ){
125 | // Build Notice ID
126 | print
127 | .line()
128 | .boldMagentaLine(
129 | "Building #arguments.projectName# v#arguments.version#+#arguments.buildID# from #cwd# using the #arguments.branch# branch."
130 | )
131 | .toConsole();
132 |
133 | // Prepare exports directory
134 | variables.exportsDir = variables.artifactsDir & "/#projectName#/#arguments.version#";
135 | directoryCreate( variables.exportsDir, true, true );
136 |
137 | // Project Build Dir
138 | variables.projectBuildDir = variables.buildDir & "/#projectName#";
139 | directoryCreate(
140 | variables.projectBuildDir,
141 | true,
142 | true
143 | );
144 |
145 | // Copy source
146 | print.blueLine( "Copying source to build folder..." ).toConsole();
147 | copy(
148 | variables.cwd,
149 | variables.projectBuildDir
150 | );
151 |
152 | // Create build ID
153 | fileWrite(
154 | "#variables.projectBuildDir#/#projectName#-#version#+#buildID#",
155 | "Built with love on #dateTimeFormat( now(), "full" )#"
156 | );
157 |
158 | // Updating Placeholders
159 | print.greenLine( "Updating version identifier to #arguments.version#" ).toConsole();
160 | command( "tokenReplace" )
161 | .params(
162 | path = "/#variables.projectBuildDir#/**",
163 | token = "@build.version@",
164 | replacement = arguments.version
165 | )
166 | .run();
167 |
168 | print.greenLine( "Updating build identifier to #arguments.buildID#" ).toConsole();
169 | command( "tokenReplace" )
170 | .params(
171 | path = "/#variables.projectBuildDir#/**",
172 | token = ( arguments.branch == "master" ? "@build.number@" : "+@build.number@" ),
173 | replacement = ( arguments.branch == "master" ? arguments.buildID : "-snapshot" )
174 | )
175 | .run();
176 |
177 | // zip up source
178 | var destination = "#variables.exportsDir#/#projectName#-#version#.zip";
179 | print.greenLine( "Zipping code to #destination#" ).toConsole();
180 | cfzip(
181 | action = "zip",
182 | file = "#destination#",
183 | source = "#variables.projectBuildDir#",
184 | overwrite = true,
185 | recurse = true
186 | );
187 |
188 | // Copy box.json for convenience
189 | fileCopy(
190 | "#variables.projectBuildDir#/box.json",
191 | variables.exportsDir
192 | );
193 | }
194 |
195 | /**
196 | * Produce the API Docs
197 | */
198 | function docs(
199 | required projectName,
200 | version = "1.0.0",
201 | outputDir = ".tmp/apidocs"
202 | ){
203 | // Create project mapping
204 | fileSystemUtil.createMapping( arguments.projectName, variables.cwd );
205 | // Generate Docs
206 | print.greenLine( "Generating API Docs, please wait..." ).toConsole();
207 | directoryCreate( arguments.outputDir, true, true );
208 |
209 | command( "docbox generate" )
210 | .params(
211 | "source" = "models",
212 | "mapping" = "models",
213 | "strategy-projectTitle" = "#arguments.projectName# v#arguments.version#",
214 | "strategy-outputDir" = arguments.outputDir
215 | )
216 | .run();
217 |
218 | print.greenLine( "API Docs produced at #arguments.outputDir#" ).toConsole();
219 |
220 | var destination = "#variables.exportsDir#/#projectName#-docs-#version#.zip";
221 | print.greenLine( "Zipping apidocs to #destination#" ).toConsole();
222 | cfzip(
223 | action = "zip",
224 | file = "#destination#",
225 | source = "#arguments.outputDir#",
226 | overwrite = true,
227 | recurse = true
228 | );
229 | }
230 |
231 | /**
232 | * Build the latest changelog file: changelog-latest.md
233 | */
234 | function latestChangelog(){
235 | print.blueLine( "Building latest changelog..." ).toConsole();
236 |
237 | if ( !fileExists( variables.cwd & "changelog.md" ) ) {
238 | return error( "Cannot continue building, changelog.md file doesn't exist!" );
239 | }
240 |
241 | fileWrite(
242 | variables.cwd & "changelog-latest.md",
243 | fileRead( variables.cwd & "changelog.md" ).split( "----" )[ 2 ].trim() & chr( 13 ) & chr( 10 )
244 | );
245 |
246 | print
247 | .greenLine( "Latest changelog file created at `changelog-latest.md`" )
248 | .line()
249 | .line( fileRead( variables.cwd & "changelog-latest.md" ) );
250 | }
251 |
252 | /********************************************* PRIVATE HELPERS *********************************************/
253 |
254 | /**
255 | * Build Checksums
256 | */
257 | private function buildChecksums(){
258 | print.greenLine( "Building checksums" ).toConsole();
259 | command( "checksum" )
260 | .params(
261 | path = "#variables.exportsDir#/*.zip",
262 | algorithm = "SHA-512",
263 | extension = "sha512",
264 | write = true
265 | )
266 | .run();
267 | command( "checksum" )
268 | .params(
269 | path = "#variables.exportsDir#/*.zip",
270 | algorithm = "md5",
271 | extension = "md5",
272 | write = true
273 | )
274 | .run();
275 | }
276 |
277 | /**
278 | * DirectoryCopy is broken in lucee
279 | */
280 | private function copy( src, target, recurse = true ){
281 | // process paths with excludes
282 | directoryList(
283 | src,
284 | false,
285 | "path",
286 | function( path ){
287 | var isExcluded = false;
288 | variables.excludes.each( function( item ){
289 | if ( path.replaceNoCase( variables.cwd, "", "all" ).reFindNoCase( item ) ) {
290 | isExcluded = true;
291 | }
292 | } );
293 | return !isExcluded;
294 | }
295 | ).each( function( item ){
296 | // Copy to target
297 | if ( fileExists( item ) ) {
298 | print.blueLine( "Copying #item#" ).toConsole();
299 | fileCopy( item, target );
300 | } else {
301 | print.greenLine( "Copying directory #item#" ).toConsole();
302 | directoryCopy(
303 | item,
304 | target & "/" & item.replace( src, "" ),
305 | true
306 | );
307 | }
308 | } );
309 | }
310 |
311 | /**
312 | * Gets the last Exit code to be used
313 | **/
314 | private function getExitCode(){
315 | return ( createObject( "java", "java.lang.System" ).getProperty( "cfml.cli.exitCode" ) ?: 0 );
316 | }
317 |
318 | }
319 |
--------------------------------------------------------------------------------
/build/release.boxr:
--------------------------------------------------------------------------------
1 | # This recipe signifies a new release of the module by doing merges and bumps accordingly
2 |
3 | # Check out master and update it locally
4 | !git checkout -f master
5 | !git pull origin master
6 |
7 | # Merge development into it for release
8 | !git merge --no-ff development
9 |
10 | # Tag the master repo with the version from box.json
11 | echo "git tag v`package show version`" | run
12 |
13 | # Push all branches back out to github
14 | !git push origin --all
15 |
16 | # Push all tags
17 | !git push origin --tags
18 |
19 | # Check out development again
20 | !git checkout -f development
21 |
22 | # Bump to prepare for a new release, do minor, change if needed and don't tag
23 | bump --minor --!tagVersion
24 | !git commit -a -m "version bump"
25 | !git push origin development
--------------------------------------------------------------------------------
/changelog.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ----
9 | ## [Unreleased]
10 |
11 | ### Fixed
12 |
13 | * Set all `hash` usage algorithms to MD5 for Adobe change to default algorithm
14 |
15 | ## v5.7.1 => 2023-SEP-21
16 |
17 | ### Fixed
18 |
19 | * Added `entryPoint`, `modelNamespace` and `cfmapping` keys to ModuleConfig, to ensure mappings for downstream modules are available during framework load
20 |
21 | ## v5.7.0 => 2023-MAY-03
22 |
23 | ### Changed
24 |
25 | * Updates permission handling to account for updated AWS default bucket policies
26 |
27 | ## v5.6.0 => 2023-MAR-07
28 |
29 | ### Added
30 |
31 | * Support for overriding response headers like content type for pre-signed URLs
32 |
33 | ## v5.5.2 => 2023-FEB-07
34 |
35 | ### Fixed
36 |
37 | * Multi-part upload concurrency fixes
38 |
39 | ## v5.5.1 => 2023-FEB-03
40 |
41 | ### Added
42 |
43 | * Support for multi-part file uploads to conserve memory usage
44 |
45 | ## v5.4.1 => 2023-FEB-02
46 |
47 |
48 | ## v5.3.1 => 2023-FEB-02
49 |
50 | ## v5.2.0 => 2023-JAN-26
51 |
52 | ### Added
53 |
54 | * Add support for server side encryption
55 | * Add retry support for S3 connection failures
56 |
57 | ## v5.1.2 => 2022-OCT-19
58 |
59 | ### Added
60 |
61 | * Added property to ensure URLEndpointHostname can be retrieved
62 |
63 | ## v5.1.1 => 2022-NOV-1
64 |
65 | ### Fixed
66 |
67 | * Fixes an issue where header content types were not present in the arguments scope
68 |
69 | ## v5.0.0 => 2022-OCT-19
70 |
71 | ### Changed / Compatibility
72 |
73 | * Dropped Adobe 2016 Support
74 | * Configuration setting: `encryption_charset` changed to `encryptionCharset` for consistency. **Breaking change**
75 |
76 | ### Added
77 |
78 | * Revamp of ACLs to allow any grant to be added to any object.
79 | * Ability to request pre-signed `PUT` URLs so you don't have to upload to an intermediate server before S3. You can now create a signed PUT operation that lets clients upload directly to S3.
80 | * Encoding of signed URLs to avoid issues with weird filenames
81 | * Preserve content type on copy
82 | * Ability to choose how many times to retry S3 operations when they fail with a 500 or 503, which can happen due to throttling or rate limiting. You can configure it with the new setting `retriesOnError`, which defaults to 3 (see the sketch after this list).
83 | * New ColdBox Module template
84 | * Add bucket name to test suite
85 | * GitHub Actions migration
86 | * Avoid error logs for `objectExists()`
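
As a minimal configuration sketch (not taken from this repository's docs), the renamed `encryptionCharset` setting and the new `retriesOnError` setting would live in the ColdBox `moduleSettings` struct introduced for this module in v4.0.0; any key not named in these release notes is an illustrative placeholder:

```cfml
// config/Coldbox.cfc (sketch only): encryptionCharset and retriesOnError are the
// settings named in these release notes; everything else here is an assumption
moduleSettings = {
    s3sdk : {
        encryptionCharset : "UTF-8", // renamed from encryption_charset (breaking change)
        retriesOnError    : 5        // retry 500/503 responses up to 5 times (default is 3)
    }
};
```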
87 |
88 | ### Fixed
89 |
90 | * @bdw429s Fixed tons of issues with filename encodings. :party:
91 | * 404 is not treated as an "error" status when checking requests for errors
92 | * The argument name in `putObject()` was incorrect: `arguments.content` instead of `arguments.data`. This only happens when `md5 == "auto"`, so it probably slipped by for some time.
93 |
94 | ----
95 |
96 | ## v4.8.0 => 2021-JUL-06
97 |
98 | ### Added
99 |
100 | * Migration to GitHub Actions
101 | * Added a new argument, `downloadObject( getAsBinary : 'no' )`, so you can retrieve binary or non-binary objects. Defaults to non-binary (see the sketch below).
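
A usage sketch for the new argument; only `getAsBinary` is confirmed by the note above, and the other argument names and values are illustrative assumptions:

```cfml
// Sketch only: getAsBinary is the documented new argument; the remaining
// argument names here are assumptions, not confirmed against AmazonS3.cfc
binaryContent = s3.downloadObject(
    bucketName  = "my-bucket",
    uri         = "reports/summary.pdf",
    getAsBinary = "yes" // defaults to 'no' (non-binary)
);
```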
102 |
103 | ----
104 |
105 | ## v4.7.0 => 2021-MAR-24
106 |
107 | ### Added
108 |
109 | * Adobe 2021 to the testing matrix and supported engines
110 |
111 | ### Fixed
112 |
113 | * Adobe 2021 issues with date formatting
114 | * Watcher needed to use the root `.cfformat.json`
115 |
116 | ----
117 |
118 | ## v4.6.0 => 2021-FEB-18
119 |
120 | ### Added
121 |
122 | * New method: `setAccessControlPolicy()` so you can add ACLs to buckets
123 | * `getBucket()` has been updated to use the ListObjectsV2 API, which AWS recommends and which returns more detailed information.
124 | * Implements SigV4-signed requests thanks to @sbleon's amazing work! (see the sketch after this list)
125 | * Added more formatting rules via cfformat
126 | * Added a `gitattributes` for cross-OS compatibility
127 | * Added a `markdownlint.json` for more control over markdown
128 | * Added new package script : `format:watch` to format and watch :)
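
For the SigV4 work, here is a call sketch against `generateSignatureData()` as declared in `models/Sv4Util.cfc` further down in this repository; the argument names match that signature, and all concrete values are illustrative:

```cfml
// Sketch: argument names match Sv4Util.generateSignatureData(); values are illustrative
sv4     = new models.Sv4Util();
sigData = sv4.generateSignatureData(
    requestMethod      = "GET",
    hostName           = "my-bucket.s3.amazonaws.com",
    requestURI         = "/some-file.txt",
    requestBody        = "",
    requestHeaders     = {},
    requestParams      = {},
    accessKey          = "AKIA...",
    secretKey          = "...",
    regionName         = "us-east-1",
    serviceName        = "s3",
    presignDownloadURL = true
);
// sigData.signature plus sigData.requestParams (X-Amz-Algorithm, X-Amz-Credential,
// X-Amz-Date, X-Amz-SignedHeaders) can then be appended to the download URL as a query string
```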
129 |
130 | ### Changed
131 |
132 | * Updated tests to fire up in ColdBox 6
133 | * Cleans up parameters that were being passed as resource strings (which were then being encoded and blowing up).
134 | * Updated release recipe to match newer modules.
135 |
136 | ### Removed
137 |
138 | * Cleanup of old cfml engine files
139 | * Cleanup of old init code
140 | * Removed some settings from test harness
141 |
142 | ----
143 | ## v4.5.0 => 2020-MAR-11
144 |
145 | * `Feature` : `SV4Util` is now a singleton for better performance, with more configuration expansion by adding the SDK reference
146 | * `Improvement` : Better error messages when s3 goes :boom:
147 | * `Bug` : Fix for ACF double encoding
148 |
149 | ----
150 | ## v4.4.0 => 2019-MAY-15
151 |
152 | * Reworked SSL setup to allow for dynamic creation of the URL entry point
153 | * Removed ACF11 support officially; it is impossible to deal with their cfhttp junk! It still works, but at your own risk.
154 |
155 | ----
156 | ## v4.3.0 => 2019-APR-05
157 |
158 | * Removal of debugging code
159 |
160 | ----
161 | ## v4.2.1 => 2019-MAR-26
162 |
163 | * Avoid double encoding on `copy`, `putObjectFile`, and `delete()` operations
164 | * Consolidate ssl to use `variables` instead of `arguments`
165 |
166 | ----
167 | ## v4.2.0 => 2019-MAR-15
168 |
169 | * ACF compatibilities
170 | * Fixes for auth on folder commands
171 | * New constructor args: `defaultDelimiter` for folder operations, and `defaultBucketname` so you can set a default bucket for all bucket-related operations.
172 | * Avoid nasty error on bucket deletion
173 | * Add new method `objectExists()` boolean check for objects
174 | * Fix URI encoding on signatures for headers and query params
175 |
176 | ----
177 | ## v4.1.1 => 2019-MAR-26
178 |
179 | * Removed some leftover dump/aborts
180 |
181 | ----
182 | ## v4.1.0 => 2019-MAR-13
183 |
184 | * DigitalOcean Spaces compatibility
185 | * Region naming support: you can now pass the `awsRegion` argument to the constructor to select the AWS or DO region
186 | * SSL is now the default for all operations
187 | * Addition of two new constructor params, `awsRegion` and `awsDomain`, to support regions and multi-domains for AWS and Digital Ocean (see the sketch after this list)
188 | * Added log debugging to calls and signatures if LogBox is on `debug` level
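
A constructor sketch for the two new params; `awsRegion` and `awsDomain` are the documented additions, while the credential argument names are placeholders rather than confirmed API:

```cfml
// Sketch only: awsRegion and awsDomain are the new constructor params named above;
// the accessKey/secretKey argument names are illustrative assumptions
s3 = new s3sdk.models.AmazonS3(
    accessKey = "AKIA...",
    secretKey = "...",
    awsRegion = "us-east-1",    // or a DigitalOcean region such as "nyc3"
    awsDomain = "amazonaws.com" // or "digitaloceanspaces.com" for DO Spaces
);
```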
189 |
190 | ----
191 | ## v4.0.1 => 2018-OCT-22
192 |
193 | * Fixes to models location, oopsy!
194 |
195 | ----
196 | ## v4.0.0 => 2018-OCT-20
197 |
198 | * AWS Region Support
199 | * Migrated Module Layout to use Ortus Standard Module Layout
200 | * Added testing for all ACF Engines
201 | * Rework as a generic Box module (compatibility change): you must move your top-level `s3sdk` settings in your ColdBox config to `moduleSettings.s3sdk`
202 | * `deleteBucket()` returns **false** if bucket doesn't exist instead of throwing an exception
203 | * Few optimizations and documentation of the API
204 |
205 | ----
206 | ## v3.0.1
207 |
208 | * Travis Updates and self-publishing
209 |
210 | ----
211 | ## v3.0.0
212 |
213 | * Upgrade to ColdBox 4 standards
214 | * Upgrade to latest Amazon S3 SDK standards
215 | * Travis build process
216 |
217 | ----
218 | ## v2.0
219 |
220 | * Original Spec as a ColdBox Plugin
221 |
--------------------------------------------------------------------------------
/models/MiniLogBox.cfc:
--------------------------------------------------------------------------------
1 | component {
2 |
3 | MiniLogBox function init( boolean debug ){
4 | variables.debug = arguments.debug;
5 | variables.logs = [];
6 | return this;
7 | }
8 |
9 | boolean function canDebug(){
10 | return variables.debug;
11 | }
12 |
13 | function debug( required string msg, data ){
14 | arrayAppend( variables.logs, arguments.msg );
15 | if ( structKeyExists( arguments, "data" ) ) {
16 | arrayAppend( variables.logs, arguments.data );
17 | }
18 | }
19 |
20 | function error( required string msg, data ){
21 | arrayAppend( variables.logs, "Error: " & arguments.msg );
22 | if ( structKeyExists( arguments, "data" ) ) {
23 | arrayAppend( variables.logs, arguments.data );
24 | }
25 | }
26 |
27 | function warn( required string msg, data ){
28 | arrayAppend( variables.logs, "Warn: " & arguments.msg );
29 | if ( structKeyExists( arguments, "data" ) ) {
30 | arrayAppend( variables.logs, arguments.data );
31 | }
32 | }
33 |
34 | array function getLogs(){
35 | return variables.logs;
36 | }
37 |
38 | }
39 |
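
A usage sketch built only from the methods above; the instantiation path assumes the module's `models` folder is mappable:

```cfml
// Usage sketch for MiniLogBox; every method shown exists in the component above
logger = new models.MiniLogBox( debug = true );

if ( logger.canDebug() ) {
    logger.debug( "Signing request", { bucket : "my-bucket" } );
}
logger.warn( "Retrying after a 503 response" );
logger.error( "Upload failed", { statusCode : 500 } );

// getLogs() returns the accumulated array of messages and any data payloads
writeDump( logger.getLogs() );
```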
--------------------------------------------------------------------------------
/models/Sv2Util.cfc:
--------------------------------------------------------------------------------
1 | /**
2 | * Amazon Web Services Signature Version 2 Utility for ColdFusion
3 | **/
4 | component singleton {
5 |
6 | /**
7 | * Creates a new instance of the utility for generating signatures using the supplied settings
8 | *
9 | * @return new instance initialized with specified settings
10 | */
11 | Sv2Util function init(){
12 | return this;
13 | }
14 |
15 | /**
16 | * Generates a version 2 signature and returns headers for the request
17 | *
18 | * @requestMethod - Request operation, ie PUT, GET, POST, etcetera.
19 | * @requestURI - Absolute path of the URI. Portion of the URL after the host, to the "?" beginning the query string
20 | * @requestHeaders - Structure of HTTP headers used for the request.
21 | * @requestParams - Structure containing any url parameters for the request.
22 | * @amzHeaders - Structure containing any amazon headers used to build the signature.
23 | */
24 | public struct function generateSignatureData(
25 | required string requestMethod,
26 | required string hostName,
27 | required string requestURI,
28 | required any requestBody,
29 | required struct requestHeaders,
30 | required struct requestParams,
31 | required string accessKey,
32 | required string secretKey,
33 | required string regionName,
34 | required string serviceName,
35 | boolean signedPayload = true,
36 | array excludeHeaders = [],
37 | string amzDate,
38 | string dateStamp
39 | ){
40 | var props = {
41 | requestHeaders : arguments.requestHeaders,
42 | requestParams : arguments.requestParams,
43 | canonicalURI : "",
44 | accessKey : arguments.accessKey,
45 | secretKey : arguments.secretKey,
46 | regionName : arguments.regionName,
47 | serviceName : arguments.serviceName,
48 | hostName : arguments.hostName,
49 | requestMethod : arguments.requestMethod,
50 | requestPayload : arguments.signedPayload ? hash256( arguments.requestBody ) : arguments.requestBody,
51 | excludeHeaders : arguments.excludeHeaders
52 | };
53 |
54 | // Override current utc date and time
55 | if ( structKeyExists( arguments, "amzDate" ) || structKeyExists( arguments, "dateStamp" ) ) {
56 | props.dateStamp = arguments.dateStamp;
57 | props.amzDate = arguments.amzDate;
58 | } else {
59 | var utcDateTime = dateConvert( "local2UTC", now() );
60 | // Generate UTC time stamps
61 | props.dateStamp = dateFormat( utcDateTime, "yyyymmdd" );
62 | props.amzDate = props.dateStamp & "T" & timeFormat( utcDateTime, "HHmmss" ) & "Z";
63 | }
64 |
65 | var sortedHeaders = structSort( props.requestHeaders, "text", "asc" );
66 | for ( var header in sortedHeaders ) {
67 | props.canonicalURI &= lCase( trim( header ) ) & ":" & props.requestHeaders[ header ] & chr( 10 );
68 | };
69 |
70 | props.canonicalURI = props.requestMethod & chr( 10 )
71 | & ( props.requestHeaders[ "content-md5" ] ?: "" ) & chr( 10 )
72 | & ( props.requestHeaders[ "content-type" ] ?: "" ) & chr( 10 )
73 | & props.amzDate & chr( 10 )
74 | & props.canonicalURI
75 | & "/" & arguments.requestURI;
76 |
77 | // Calculate the hash of the information
78 | var digest = hMAC_SHA1( props.secretKey, props.canonicalURI );
79 | // fix the returned data to be a proper signature
80 | props.signature = toBase64( digest );
81 | props.authorizationHeader = "AWS #props.accessKey#:#props.signature#";
82 |
83 | return props;
84 | }
85 |
86 |
87 | /**
88 | * NSA SHA-1 Algorithm: RFC 2104 HMAC-SHA1
89 | */
90 | private binary function HMAC_SHA1( required string signKey, required string signMessage ){
91 | var jMsg = javacast( "string", arguments.signMessage ).getBytes( encryptionCharset );
92 | var jKey = javacast( "string", arguments.signKey ).getBytes( encryptionCharset );
93 | var key = createObject( "java", "javax.crypto.spec.SecretKeySpec" ).init( jKey, "HmacSHA1" );
94 | var mac = createObject( "java", "javax.crypto.Mac" ).getInstance( key.getAlgorithm() );
95 |
96 | mac.init( key );
97 | mac.update( jMsg );
98 |
99 | return mac.doFinal();
100 | }
101 |
102 | }
103 |
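
A call sketch built from the `generateSignatureData()` signature above; the concrete values are illustrative only:

```cfml
// Sketch: argument names match Sv2Util.generateSignatureData(); values are illustrative
sigData = new models.Sv2Util().generateSignatureData(
    requestMethod  = "GET",
    hostName       = "my-bucket.s3.amazonaws.com",
    requestURI     = "my-bucket/some-file.txt",
    requestBody    = "",
    requestHeaders = { "content-type" : "text/plain" },
    requestParams  = {},
    accessKey      = "AKIA...",
    secretKey      = "...",
    regionName     = "us-east-1",
    serviceName    = "s3"
);
// sigData.authorizationHeader has the form "AWS <accessKey>:<base64 signature>"
```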
--------------------------------------------------------------------------------
/models/Sv4Util.cfc:
--------------------------------------------------------------------------------
1 | /**
2 | * Amazon Web Services Signature 4 Utility for ColdFusion
3 | * Version Date: 2016-04-12 (Alpha)
4 | *
5 | * Copyright 2016 Leigh (cfsearching)
6 | *
7 | * Requirements: Adobe ColdFusion 10+
8 | * AWS Signature 4 specifications: http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | */
22 | component singleton {
23 |
24 | /**
25 | * Creates a new instance of the utility for generating signatures using the supplied settings
26 | *
27 | * @return new instance initialized with specified settings
28 | */
29 | Sv4Util function init(){
30 | // Algorithms used in calculating the signature
31 | variables.signatureAlgorithm = "AWS4-HMAC-SHA256";
32 | variables.hashAlorithm = "SHA256";
33 |
34 | return this;
35 | }
36 |
37 |
38 | /**
39 | * Generates Signature 4 properties for the supplied request settings.
40 | *
41 | * @requestMethod - Request operation, ie PUT, GET, POST, etcetera.
42 | * @hostName - Target host name, example: bucketname.s3.amazonaws.com
43 | * @requestURI - Absolute path of the URI. Portion of the URL after the host, to the "?" beginning the query string
44 | * @requestBody - Body of the request. Either a string or binary value.
45 | * @requestHeaders - Structure of HTTP headers used for the request. Mandatory host and date headers are automatically generated.
46 | * @requestParams - Structure containing any url parameters for the request. Mandatory parameters are automatically generated.
47 | * @excludeHeaders - (Optional) List of header names AWS can exclude from the signing process. Default is an empty array, which means all headers should be "signed"
48 | * @amzDate - (Optional) Override the automatic X-Amz-Date calculation with this value. Current UTC date. If supplied, @dateStamp is required. Format: yyyyMMddTHHnnssZ
49 | * @regionName - (Optional) Override the instance region name with this value. Example "us-east-1"
50 | * @serviceName - (Optional) Override the instance service name with this value. Example "s3"
51 | * @dateStamp - (Optional) Override the automatic dateStamp calculation with this value. Current UTC date (only). If supplied, @amzDate is required. Format: yyyyMMdd
52 | * @presignDownloadURL - (Optional) Generates a signed request with all required parameters in the query string, and no headers except for Host.
53 | *
54 | * @return Signature value, authorization header and all properties part of the signature calculation: ALGORITHM,AMZDATE,AUTHORIZATIONHEADER,CANONICALHEADERS,CANONICALQUERYSTRING,CANONICALREQUEST,CANONICALURI,CREDENTIALSCOPE,DATESTAMP,EXCLUDEHEADERS,HOSTNAME,REGIONNAME,REQUESTHEADERS,REQUESTMETHOD,REQUESTPARAMS,REQUESTPAYLOAD,SERVICENAME,SIGNATURE,SIGNEDHEADERS,SIGNKEYBYTES,STRINGTOSIGN
55 | */
56 | public struct function generateSignatureData(
57 | required string requestMethod,
58 | required string hostName,
59 | required string requestURI,
60 | required any requestBody,
61 | required struct requestHeaders,
62 | required struct requestParams,
63 | required string accessKey,
64 | required string secretKey,
65 | required string regionName,
66 | required string serviceName,
67 | array excludeHeaders = [],
68 | string amzDate,
69 | string dateStamp,
70 | boolean presignDownloadURL = false
71 | ){
72 | // Initialize properties
73 | var props = {};
74 | var hasQueryParams = structCount( arguments.requestParams ) > 0;
75 | var utcDateTime = dateConvert( "local2UTC", now() );
76 |
77 | // Generate UTC time stamps
78 | props.dateStamp = dateFormat( utcDateTime, "yyyymmdd" );
79 | props.amzDate = props.dateStamp & "T" & timeFormat( utcDateTime, "HHmmss" ) & "Z";
80 |
81 | // Override current utc date and time
82 | if ( structKeyExists( arguments, "amzDate" ) || structKeyExists( arguments, "dateStamp" ) ) {
83 | props.dateStamp = arguments.dateStamp;
84 | props.amzDate = arguments.amzDate;
85 | }
86 |
87 | props.accessKey = arguments.accessKey;
88 | props.secretKey = arguments.secretKey;
89 | props.regionName = arguments.regionName;
90 | props.serviceName = arguments.serviceName;
91 |
92 | // ///////////////////////////////////
93 | // Basic request properties
94 | // ///////////////////////////////////
95 | props.algorithm = variables.signatureAlgorithm;
96 | props.hostName = arguments.hostName;
97 | props.requestMethod = arguments.requestMethod;
98 | props.canonicalURI = buildCanonicalURI( requestURI = arguments.requestURI );
99 |
100 | // For signed requests, the payload is a checksum
101 | props.requestPayload = hash256( arguments.requestBody );
102 | props.credentialScope = buildCredentialScope(
103 | dateStamp = props.dateStamp,
104 | serviceName = props.serviceName,
105 | regionName = props.regionName
106 | );
107 |
108 |
109 | // ///////////////////////////////////
110 | // Validate headers/parameters
111 | // ///////////////////////////////////
112 | props.requestHeaders = duplicate( arguments.requestHeaders );
113 | props.requestParams = duplicate( arguments.requestParams );
114 |
115 | // Host header is mandatory for ALL requests
116 | props.requestHeaders[ "Host" ] = arguments.hostName;
117 |
118 | // Apply mandatory headers and parameters
119 | if ( presignDownloadURL ) {
120 | // First, normalize request headers
121 | props.requestHeaders = cleanHeaders( props.requestHeaders );
122 | props.excludeHeaders = cleanHeaderNames( arguments.excludeHeaders );
123 |
124 | // Signed requests must include a checksum, ie hash of payload
125 | // props.requestParams["X-Amz-Content-Sha256"] = props.requestPayload;
126 | props.requestPayload = "UNSIGNED-PAYLOAD";
127 |
128 | // Identify which headers will be included in the signing process
129 | props.signedHeaders = buildSignedHeaders(
130 | requestHeaders = props.requestHeaders,
131 | excludeNames = props.excludeHeaders
132 | );
133 |
134 | // When presigning a download URL, canonical query string must also
135 | // include the parameters used as part of the signing process, ie hashing algorithm,
136 | // credential scope, date, and signed headers parameters.
137 | props.requestParams[ "X-Amz-Algorithm" ] = variables.signatureAlgorithm;
138 | props.requestParams[ "X-Amz-Credential" ] = "#props.accessKey#/#props.credentialScope#";
139 | props.requestParams[ "X-Amz-SignedHeaders" ] = props.signedHeaders;
140 | props.requestParams[ "X-Amz-Date" ] = props.amzDate;
141 |
142 | // Finally, normalize url parameters
143 | props.requestParams = encodeQueryParams( queryParams = props.requestParams );
144 | }
145 | // All other request types (PUT, DELETE, POST, ....)
146 | else {
147 | // Signed requests must include a checksum, ie hash of payload
148 | props.requestHeaders[ "X-Amz-Content-Sha256" ] = props.requestPayload;
149 |
150 | // Host header is mandatory for ALL requests
151 | props.requestHeaders[ "Host" ] = arguments.hostName;
152 | // Date header is mandatory when not passing values in url
153 | props.requestHeaders[ "X-Amz-Date" ] = props.amzDate;
154 |
155 | // Normalize headers and url parameters
156 | props.requestHeaders = cleanHeaders( props.requestHeaders );
157 | props.excludeHeaders = cleanHeaderNames( arguments.excludeHeaders );
158 | // Identify which headers will be included in the signing process
159 | props.signedHeaders = buildSignedHeaders(
160 | requestHeaders = props.requestHeaders,
161 | excludeNames = props.excludeHeaders
162 | );
163 | props.requestParams = encodeQueryParams( queryParams = props.requestParams );
164 | }
165 |
166 |
167 | // ///////////////////////////////////////
168 | // Generate signature
169 | // ///////////////////////////////////////
170 |
171 | // Generate header, query, and request strings
172 | props.canonicalQueryString = buildCanonicalQueryString( requestParams = props.requestParams );
173 | props.canonicalHeaders = buildCanonicalHeaders( requestHeaders = props.requestHeaders );
174 | props.canonicalRequest = buildCanonicalRequest( argumentCollection = props );
175 |
176 | // Generate signature and authorization strings
177 | props.stringToSign = generateStringToSign( argumentCollection = props );
178 | props.signKeyBytes = generateSignatureKey( argumentCollection = props );
179 | props.signature = lCase(
180 | binaryEncode( hmacBinary( message = props.stringToSign, key = props.signKeyBytes ), "hex" )
181 | );
182 | props.authorizationHeader = buildAuthorizationHeader( argumentCollection = props );
183 |
184 | // (Debugging) Convert binary values into human readable form
185 | props.signKeyBytes = binaryEncode( props.signKeyBytes, "hex" );
186 |
187 | return props;
188 | }
189 |
190 | /**
191 | * Generates request string to sign
192 | *
193 | * @amzDate - Current timestamp in UTC. Format yyyyMMddTHHnnssZ
194 | * @credentialScope - String defining scope of request. See buildCredentialScope().
195 | * @canonicalRequest - Canonical request string
196 | *
197 | * @return - String to be signed
198 | */
199 | private string function generateStringToSign(
200 | required string amzDate,
201 | required string credentialScope,
202 | required string canonicalRequest
203 | ){
204 | // Format: Algorithm + '\n' + RequestDate + '\n' + CredentialScope + '\n' + HashedCanonicalRequest
205 | var elements = [
206 | variables.signatureAlgorithm,
207 | arguments.amzDate,
208 | arguments.credentialScope,
209 | hash256( arguments.canonicalRequest )
210 | ];
211 |
212 | return arrayToList( elements, chr( 10 ) );
213 | }
214 |
215 | /**
216 | * Generate canonical request string
217 | *
218 | * @requestMethod - Request operation, ie PUT, GET, POST, etcetera.
219 | * @canonicalURI - Canonical URL string. See buildCanonicalURI
220 | * @canonicalHeaders - Canonical header string. See buildCanonicalHeaders
221 | * @canonicalQueryString - Canonical query string. See buildCanonicalQueryString
222 | * @signedHeaders - List of signed headers. See buildSignedHeaders
223 | * @requestPayload - For signed requests, this is the hash of the request body. Otherwise, the raw request body
224 | */
225 | private string function buildCanonicalRequest(
226 | required string requestMethod,
227 | required string canonicalURI,
228 | required string canonicalQueryString,
229 | required string canonicalHeaders,
230 | required string signedHeaders,
231 | required string requestPayload
232 | ){
233 | var canonicalRequest = "";
234 |
235 | // Build ordered list of elements in the request, delimited by new lines
236 | // Note: Headers and signed headers should never be empty. "Host" header is always required.
237 | canonicalRequest = arguments.requestMethod & chr( 10 )
238 | & arguments.canonicalURI & chr( 10 )
239 | & arguments.canonicalQueryString & chr( 10 )
240 | & arguments.canonicalHeaders & chr( 10 )
241 | & arguments.signedHeaders & chr( 10 )
242 | & arguments.requestPayload;
243 |
244 | return canonicalRequest;
245 | }
246 |
247 | /**
248 | * Generates canonical query string
249 | *
250 | * URI-encode each parameter name and value according to RFC 3986
251 | * Percent-encode all other characters with %XY, where X and Y are hexadecimal characters (0-9 and uppercase A-F)
252 | * Sort the encoded parameter names by character code in ascending order (ASCII order)
253 | * Build the canonical query string by starting with the first parameter name in the sorted list.
254 | * For each parameter, append the URI-encoded parameter name, followed by the character '=' (ASCII code 61), followed by the URI-encoded parameter value. Use an empty string for parameters that have no value.
255 | * Append the character '&' (ASCII code 38) after each parameter value, except for the last value in the list.
256 | *
257 | *
258 | * @requestParams Structure containing all parameters passed via the query string.
259 | * @isEncoded If true, the supplied parameters are already url encoded
260 | *
261 | * @return canonical query string
262 | */
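// Worked example added for illustration only (not part of the original source),
// following the rules documented above:
//   requestParams = { "prefix" : "my folder", "marker" : "" }
//   1. URI-encode names and values per RFC 3986 -> prefix=my%20folder, marker=
//   2. Sort the encoded names in ASCII order    -> marker, prefix
//   3. Join name=value pairs with "&"           -> "marker=&prefix=my%20folder"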
263 | private string function buildCanonicalQueryString( required struct requestParams, boolean isEncoded = true ){
264 | var encodedParams = "";
265 | var paramNames = "";
266 | var paramPairs = "";
267 |
268 | // Ensure parameter names and values are URL encoded first
269 | encodedParams = isEncoded ? arguments.requestParams : encodeQueryParams( arguments.requestParams );
270 |
271 | // Extract and sort encoded parameter names
272 | paramNames = structKeyArray( encodedParams );
273 | arraySort( paramNames, "text", "asc" );
274 |
275 | // Build array of sorted name/value pairs
276 | paramPairs = [];
277 | arrayEach( paramNames, function( string param ){
278 | arrayAppend( paramPairs, arguments.param & "=" & encodedParams[ arguments.param ] );
279 | } );
280 |
281 | // Finally, generate sorted list of parameters, delimited by "&"
282 | return arrayToList( paramPairs, "&" );
283 | }
284 |
285 |
286 | /**
287 | * Generates a list of signed header names.
288 | *
289 | * "...By adding this list of headers, you tell AWS which headers in the request
290 | * are part of the signing process and which ones AWS can ignore (for example, any
291 | * additional headers added by a proxy) for purposes of validating the request."
292 | *
293 | * @requestHeaders Raw headers to be included in request
294 | * @excludeNames Names of any headers AWS should ignore for the signing process
295 | *
296 | * @return Sorted list of signed header names, delimited by semi-colon ";"
297 | */
298 | private string function buildSignedHeaders( required struct requestHeaders, required array excludeNames ){
299 | var name = "";
300 | var headerNames = [];
301 | var allHeaders = !arrayLen( arguments.excludeNames );
302 |
303 | // Identify which headers are "signed"
304 | structEach( arguments.requestHeaders, function( string name, any value ){
305 | if ( allHeaders || !arrayFindNoCase( excludeNames, arguments.name ) ) {
306 | arrayAppend( headerNames, arguments.name );
307 | }
308 | } );
309 |
310 | // Sort header names in ASCII order
311 | arraySort( headerNames, "text", "asc" );
312 |
313 | // Return list of names
314 | return arrayToList( headerNames, ";" );
315 | }
316 |
317 | /**
318 | * Generates a list of canonical headers
319 | *
320 | * @requestHeaders Structure containing headers to be included in request hash
321 | *
322 | * @return Sorted list of header pairs, delimited by new lines
323 | */
324 | private string function buildCanonicalHeaders( required struct requestHeaders ){
325 | var pairs = "";
326 | var names = "";
327 | var headers = "";
328 |
329 | // Scrub the header names and values first
330 | headers = cleanHeaders( arguments.requestHeaders );
331 |
332 | // Sort header names in ASCII order
333 | names = structKeyArray( headers );
334 | arraySort( names, "text", "asc" );
335 |
336 | // Build array of sorted header name and value pairs
337 | pairs = [];
338 | arrayEach( names, function( string key ){
339 | arrayAppend( pairs, arguments.key & ":" & headers[ arguments.key ] );
340 | } );
341 |
342 | // Generate list. Note: List must END WITH a new line character
343 | return arrayToList( pairs, chr( 10 ) ) & chr( 10 );
344 | }
345 |
346 |
347 | /**
348 | * Generates canonical URI. Encoded, absolute path component of the URI,
349 | * which is everything in the URI from the HTTP host to the question mark character ("?")
350 | * that begins the query string parameters (if any)
351 | *
352 | * @requestURI URI or path. If empty, "/" will be used
353 | *
354 | * @return URL encoded path
355 | */
356 | public string function buildCanonicalURI( required string requestURI ){
357 | var path = arguments.requestURI;
358 | // Return "/" for empty path
359 | if ( !len( trim( path ) ) ) {
360 | path = "/";
361 | }
362 |
363 | // Convert to absolute path (if needed)
364 | if ( left( path, 1 ) != "/" ) {
365 | path = "/" & path;
366 | }
367 |
368 | return urlEncodePath( path );
369 | }
370 |
371 |
372 | /**
373 | * Generates signing key for AWS Signature V4
374 | *
375 | *