├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.yml │ ├── config.yml │ ├── documentation.yml │ └── feature-request.yml ├── PULL_REQUEST_TEMPLATE.md ├── actions │ ├── check-submodules │ │ ├── README.md │ │ ├── action.yml │ │ ├── dist │ │ │ └── index.js │ │ ├── index.js │ │ ├── package-lock.json │ │ └── package.json │ └── release-tag │ │ ├── action.yml │ │ ├── index.js │ │ ├── package-lock.json │ │ ├── package.json │ │ └── packed │ │ └── index.js ├── dependabot.yml ├── docker-images │ ├── al2-x64 │ │ └── Dockerfile │ ├── al2012-x64 │ │ └── Dockerfile │ ├── al2023-x64 │ │ └── Dockerfile │ ├── alpine-3.16-arm64 │ │ └── Dockerfile │ ├── alpine-3.16-armv6 │ │ └── Dockerfile │ ├── alpine-3.16-armv7 │ │ └── Dockerfile │ ├── alpine-3.16-x64 │ │ └── Dockerfile │ ├── alpine-3.16-x86 │ │ └── Dockerfile │ ├── build_cmake.sh │ ├── cache_maven.sh │ ├── entrypoint.sh │ ├── fedora-34-x64 │ │ └── Dockerfile │ ├── manylinux1-x64 │ │ └── Dockerfile │ ├── manylinux1-x86 │ │ └── Dockerfile │ ├── manylinux2014-aarch64 │ │ └── Dockerfile │ ├── manylinux2014-armv7l │ │ └── Dockerfile │ ├── manylinux2014-x64 │ │ └── Dockerfile │ ├── manylinux2014-x86 │ │ └── Dockerfile │ ├── musllinux-1-1-aarch64 │ │ └── Dockerfile │ ├── musllinux-1-1-x64 │ │ └── Dockerfile │ ├── node-10-linux-x64 │ │ └── Dockerfile │ ├── opensuse-leap │ │ └── Dockerfile │ ├── openwrt-x64-openjdk8 │ │ └── Dockerfile │ ├── raspbian-bullseye │ │ └── Dockerfile │ ├── rhel8-x64 │ │ └── Dockerfile │ ├── swift-5-al2-x64 │ │ └── Dockerfile │ ├── swift-5-ubuntu-x64 │ │ └── Dockerfile │ ├── ubuntu-18-x64 │ │ └── Dockerfile │ ├── ubuntu-20-aarch64 │ │ └── Dockerfile │ ├── ubuntu-20-x64 │ │ └── Dockerfile │ └── ubuntu-22-x64 │ │ └── Dockerfile └── workflows │ ├── closed-issue-message.yml │ ├── create-channel.yml │ ├── create-release.yml │ ├── docker_buildx.sh │ ├── handle-stale-discussions.yml │ ├── install_buildx.sh │ ├── issue-regression-labeler.yml │ ├── lint.yml │ ├── linux-container-ci.sh │ ├── sanity-test.yml │ └── 
stale_issue.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── builder ├── __init__.py ├── __main__.py ├── actions │ ├── __init__.py │ ├── cmake.py │ ├── git.py │ ├── install.py │ ├── mirror.py │ ├── release.py │ ├── script.py │ ├── setup_cross_ci_crt_environment.py │ ├── setup_cross_ci_helpers.py │ └── setup_event_stream_echo_server.py ├── core │ ├── __init__.py │ ├── action.py │ ├── api.py │ ├── data.py │ ├── env.py │ ├── fetch.py │ ├── host.py │ ├── project.py │ ├── scripts.py │ ├── shell.py │ ├── spec.py │ ├── toolchain.py │ ├── util.py │ └── vmod.py ├── imports │ ├── __init__.py │ ├── awslc.py │ ├── boringssl.py │ ├── dockcross.py │ ├── gcc.py │ ├── golang.py │ ├── jdk.py │ ├── libcrypto.py │ ├── llvm.py │ ├── msvc.py │ ├── ndk.py │ ├── nodejs.py │ └── s2n.py └── main.py ├── requirements.txt ├── setup.cfg ├── setup.py └── tests ├── .builder ├── scripts │ └── test_action.py └── verify_imports.py ├── builder.json ├── data └── lib-1 │ └── builder.json ├── test_env.py ├── test_project.py ├── test_toolchain.py ├── test_utils.py └── test_vmod.py /.github/ISSUE_TEMPLATE/bug-report.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "🐛 Bug Report" 3 | description: Report a bug 4 | title: "(short issue description)" 5 | labels: [bug, needs-triage] 6 | assignees: [] 7 | body: 8 | - type: textarea 9 | id: description 10 | attributes: 11 | label: Describe the bug 12 | description: What is the problem? A clear and concise description of the bug. 13 | validations: 14 | required: true 15 | - type: checkboxes 16 | id: regression 17 | attributes: 18 | label: Regression Issue 19 | description: What is a regression? If it worked in a previous version but doesn't in the latest version, it's considered a regression. In this case, please provide specific version number in the report. 20 | options: 21 | - label: Select this option if this issue appears to be a regression. 
22 | required: false 23 | - type: textarea 24 | id: expected 25 | attributes: 26 | label: Expected Behavior 27 | description: | 28 | What did you expect to happen? 29 | validations: 30 | required: true 31 | - type: textarea 32 | id: current 33 | attributes: 34 | label: Current Behavior 35 | description: | 36 | What actually happened? 37 | 38 | Please include full errors, uncaught exceptions, stack traces, and relevant logs. 39 | If service responses are relevant, please include wire logs. 40 | validations: 41 | required: true 42 | - type: textarea 43 | id: reproduction 44 | attributes: 45 | label: Reproduction Steps 46 | description: | 47 | Provide a self-contained, concise snippet of code that can be used to reproduce the issue. 48 | For more complex issues provide a repo with the smallest sample that reproduces the bug. 49 | 50 | Avoid including business logic or unrelated code, it makes diagnosis more difficult. 51 | The code sample should be an SSCCE. See http://sscce.org/ for details. In short, please provide a code sample that we can copy/paste, run and reproduce. 52 | validations: 53 | required: true 54 | - type: textarea 55 | id: solution 56 | attributes: 57 | label: Possible Solution 58 | description: | 59 | Suggest a fix/reason for the bug 60 | validations: 61 | required: false 62 | - type: textarea 63 | id: context 64 | attributes: 65 | label: Additional Information/Context 66 | description: | 67 | Anything else that might be relevant for troubleshooting this bug. Providing context helps us come up with a solution that is most useful in the real world. 
68 | validations: 69 | required: false 70 | 71 | - type: input 72 | id: aws-crt-builder-version 73 | attributes: 74 | label: aws-crt-builder version used 75 | validations: 76 | required: true 77 | 78 | - type: input 79 | id: compiler-version 80 | attributes: 81 | label: Compiler and version used 82 | validations: 83 | required: true 84 | 85 | - type: input 86 | id: operating-system 87 | attributes: 88 | label: Operating System and version 89 | validations: 90 | required: true 91 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: 💬 General Question 4 | url: https://github.com/awslabs/aws-crt-builder/discussions/categories/q-a 5 | about: Please ask and answer questions as a discussion thread 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "📕 Documentation Issue" 3 | description: Report an issue in the API Reference documentation or Developer Guide 4 | title: "(short issue description)" 5 | labels: [documentation, needs-triage] 6 | assignees: [] 7 | body: 8 | - type: textarea 9 | id: description 10 | attributes: 11 | label: Describe the issue 12 | description: A clear and concise description of the issue. 13 | validations: 14 | required: true 15 | 16 | - type: textarea 17 | id: links 18 | attributes: 19 | label: Links 20 | description: | 21 | Include links to affected documentation page(s). 
22 | validations: 23 | required: true 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🚀 Feature Request 3 | description: Suggest an idea for this project 4 | title: "(short issue description)" 5 | labels: [feature-request, needs-triage] 6 | assignees: [] 7 | body: 8 | - type: textarea 9 | id: description 10 | attributes: 11 | label: Describe the feature 12 | description: A clear and concise description of the feature you are proposing. 13 | validations: 14 | required: true 15 | - type: textarea 16 | id: use-case 17 | attributes: 18 | label: Use Case 19 | description: | 20 | Why do you need this feature? For example: "I'm always frustrated when..." 21 | validations: 22 | required: true 23 | - type: textarea 24 | id: solution 25 | attributes: 26 | label: Proposed Solution 27 | description: | 28 | Suggest how to implement the addition or change. Please include prototype/workaround/sketch/reference implementation. 29 | validations: 30 | required: false 31 | - type: textarea 32 | id: other 33 | attributes: 34 | label: Other Information 35 | description: | 36 | Any alternative solutions or features you considered, a more detailed explanation, stack traces, related issues, links for context, etc. 
37 | validations: 38 | required: false 39 | - type: checkboxes 40 | id: ack 41 | attributes: 42 | label: Acknowledgements 43 | options: 44 | - label: I may be able to implement this feature request 45 | required: false 46 | - label: This feature might incur a breaking change 47 | required: false 48 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | *Issue #, if available:* 2 | 3 | *Description of changes:* 4 | 5 | 6 | By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 7 | -------------------------------------------------------------------------------- /.github/actions/check-submodules/README.md: -------------------------------------------------------------------------------- 1 | # check-submodules 2 | Scan all submodules in a repo and ensure they're using an official release tag (tag must fit pattern "vX.Y.Z"), 3 | and that the release tag hasn't accidentally rolled backwards. 4 | 5 | This is intended for use with the CRT bindings (ex: aws-crt-java). 6 | 7 | ## Notes on node_modules/ 8 | Github Actions tutorials tell you to commit `node_modules/`. 9 | But we don't do that, because then Dependabot will scan all the 3rd-party`package.json` 10 | files and start flagging vulnerabilities in "devDependencies" that don't really affect us. 11 | 12 | Instead, the `prepare` script is used to compile `index.js` and ALL dependencies to `dist/index.js`. 13 | 14 | The downside is that you must remember to run `npm install` after modifying `index.js` 15 | or any dependencies. 
16 | 17 | ## To update NPM dependencies: 18 | ``` 19 | cd path/to/aws-crt-builder/.github/actions/check-submodules 20 | npm update 21 | npm install 22 | ``` 23 | 24 | ## To run locally: 25 | ```sh 26 | # ensure you've compiled the latest changes 27 | cd path/to/aws-crt-builder/.github/actions/check-submodules 28 | npm install 29 | 30 | # now run on a specific repo 31 | cd path/to/aws-crt-java 32 | node path/to/aws-crt-builder/.github/actions/check-submodules/dist/index.js 33 | ``` 34 | 35 | 36 | -------------------------------------------------------------------------------- /.github/actions/check-submodules/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Check Submodules' 2 | description: 'Check submodule commits (ex: avoid accidental rollback)' 3 | runs: 4 | using: 'node16' 5 | # We use ncc to compile EVERYTHING into a single file. 6 | # We do this instead of committing node_modules/ to github. 7 | # The big downside of committing node_modules/ is that Dependabot 8 | # will go crazy scanning 3rdparty package.json files and flagging their 9 | # devDependencies for vulnerabilities. 10 | main: 'dist/index.js' 11 | -------------------------------------------------------------------------------- /.github/actions/check-submodules/index.js: -------------------------------------------------------------------------------- 1 | const core = require('@actions/core'); 2 | const exec = require('@actions/exec'); 3 | 4 | // Run an external command. 5 | // cwd: optional string 6 | // check: whether to raise an exception if returnCode is non-zero. Defaults to true. 
7 | const run = async function (args, opts = {}) {
// Runs args[0] with args[1..] through @actions/exec, capturing stdout.
// Returns { stdout: string, returnCode: number }.
// opts.cwd (optional) sets the working directory for the child process.
// opts.check === false makes a non-zero exit code return normally
// (via ignoreReturnCode) instead of @actions/exec throwing.
8 | var result = {};
9 | result.stdout = '';
10 | 
11 | var execOpts = {};
12 | execOpts.listeners = {
13 | stdout: (data) => {
// accumulate stdout chunks into one string for the caller
14 | result.stdout += data.toString();
15 | },
16 | };
17 | execOpts.ignoreReturnCode = opts.check == false;
18 | 
19 | if ('cwd' in opts) {
20 | execOpts.cwd = opts.cwd;
21 | }
22 | 
23 | result.returnCode = await exec.exec(args[0], args.slice(1), execOpts);
24 | return result;
25 | }
26 | 
27 | // Returns array of submodules, where each item has properties: name, path, url
// Parses `git config --file .gitmodules --list`, whose output is
// `submodule.<id>.path=...` / `submodule.<id>.url=...` lines, into one
// object per submodule. Must be run from the superproject root (uses the
// default cwd). `name` is derived from the last path component.
28 | const getSubmodules = async function () {
29 | const gitResult = await run(['git', 'config', '--file', '.gitmodules', '--list']);
30 | // output looks like:
31 | // submodule.aws-common-runtime/aws-c-common.path=crt/aws-c-common
32 | // submodule.aws-common-runtime/aws-c-common.url=https://github.com/awslabs/aws-c-common.git
33 | // ...
34 | const re = /submodule\.(.+)\.(path|url)=(.+)/;
35 | 
36 | // build map with properties of each submodule
// keyed by the submodule id so path/url lines for the same submodule merge
37 | var map = {};
38 | 
39 | const lines = gitResult.stdout.split('\n');
40 | for (var i = 0; i < lines.length; i++) {
41 | const match = re.exec(lines[i]);
42 | if (!match) {
43 | continue;
44 | }
45 | 
46 | const submoduleId = match[1];
47 | const property = match[2];
48 | const value = match[3];
49 | 
50 | let mapEntry = map[submoduleId] || {};
51 | if (property === 'path') {
52 | mapEntry.path = value;
53 | // get "name" from final directory in path
54 | mapEntry.name = value.split('/').pop()
55 | } else if (property === 'url') {
56 | mapEntry.url = value;
57 | } else {
// unreachable: the regex only admits 'path' or 'url', kept as a guard
58 | continue;
59 | }
60 | 
61 | map[submoduleId] = mapEntry;
62 | }
63 | 
64 | // return array, sorted by name
65 | return Object.values(map).sort((a, b) => a.name.localeCompare(b.name));
66 | }
67 | 
68 | // Diff the submodule against its state on origin/main.
69 | // Returns null if they're the same.
70 | // Otherwise returns something like {thisCommit: 'c74534c', mainCommit: 'b6656aa'}
// Runs in the superproject (default cwd); parses the gitlink pledge lines
// ("-Subproject commit <sha>" / "+Subproject commit <sha>") out of
// `git diff origin/main -- <path>`. Any parse failure is treated as
// "no difference" (returns null) by design — see the comment below.
71 | const diffSubmodule = async function (submodule) {
72 | const gitResult = await run(['git', 'diff', `origin/main`, '--', submodule.path]);
73 | const stdout = gitResult.stdout;
74 | 
75 | // output looks like this:
76 | //
77 | // diff --git a/crt/aws-c-auth b/crt/aws-c-auth
78 | // index b6656aa..c74534c 160000
79 | // --- a/crt/aws-c-auth
80 | // +++ b/crt/aws-c-auth
81 | // @@ -1 +1 @@
82 | // -Subproject commit b6656aad42edd5d11eea50936cb60359a6338e0b
83 | // +Subproject commit c74534c13264868bbbd14b419c291580d3dd9141
84 | try {
85 | // let's just be naive and only look at the last 2 lines
86 | // if this fails in any way, report no difference
// string patterns are implicitly compiled as regexes; [1] throws on no
// match, which the catch below converts into the null ("same") result
87 | var result = {}
88 | result.mainCommit = stdout.match('\\-Subproject commit ([a-f0-9]{40})')[1];
89 | result.thisCommit = stdout.match('\\+Subproject commit ([a-f0-9]{40})')[1];
90 | return result;
91 | } catch (error) {
92 | return null;
93 | }
94 | }
95 | 
96 | // Returns whether one commit is an ancestor of another.
// Uses `git merge-base --is-ancestor`, which exits 0 when `ancestor` is an
// ancestor of `descendant` (a commit counts as its own ancestor), 1 when it
// is not, and anything else on error (rethrown below). `cwd` should be the
// submodule's checkout so the commits can be resolved there.
97 | const isAncestor = async function (ancestor, descendant, cwd) {
98 | const gitResult = await run(['git', 'merge-base', '--is-ancestor', ancestor, descendant], { check: false, cwd: cwd });
99 | if (gitResult.returnCode == 0) {
100 | return true;
101 | }
102 | if (gitResult.returnCode == 1) {
103 | return false;
104 | }
105 | throw new Error(`The process 'git' failed with exit code ${gitResult.returnCode}`);
106 | }
107 | 
108 | // Returns the release tag for a commit, or null if there is none
// Only exact-match tags of the form vX.Y.Z qualify; annotated tags on other
// commits or differently-formatted tags yield null.
109 | const getReleaseTag = async function (commit, cwd) {
110 | const gitResult = await run(['git', 'describe', '--tags', '--exact-match', commit], { cwd: cwd, check: false });
111 | if (gitResult.returnCode != 0) {
112 | return null;
113 | }
114 | 
115 | // ensure it's a properly formatted release tag
// /m so the anchors match the tag on its own line despite trailing newline
116 | const match = gitResult.stdout.match(/^(v[0-9]+\.[0-9]+\.[0-9]+)$/m);
117 | if (!match) {
118 | return null;
119 | }
120 | 
121 | return match[1];
122 | }
123 | 
124 | 
// Main policy check. For every submodule that differs from origin/main:
// CRT-owned submodules must point at a vX.Y.Z release tag; the listed
// non-CRT repos (aws-lc, s2n, s2n-tls) must at least be on origin/main
// (aws-lc may alternatively be on its FIPS branch); and no submodule may
// roll backwards relative to origin/main. First violation fails the action.
125 | const checkSubmodules = async function () {
126 | const submodules = await getSubmodules();
127 | for (var i = 0; i < submodules.length; i++) {
128 | const submodule = submodules[i];
129 | 
130 | // Diff the submodule against its state on origin/main.
131 | // If there's no difference, then there's no need to analyze further
132 | const diff = await diffSubmodule(submodule);
133 | if (diff == null) {
134 | continue;
135 | }
136 | 
137 | // Ensure submodule is at an acceptable commit:
138 | // For repos the Common Runtime team controls, it must be at a tagged release.
139 | // For other repos, where we can't just cut a release ourselves, it needs to at least be on the main branch.
140 | const thisTag = await getReleaseTag(diff.thisCommit, submodule.path);
141 | if (!thisTag) {
142 | const nonCrtRepo = /^(aws-lc|s2n|s2n-tls)$/
143 | if (nonCrtRepo.test(submodule.name)) {
144 | const isOnMain = await isAncestor(diff.thisCommit, 'origin/main', submodule.path);
145 | if (!isOnMain) {
146 | if (/^(aws-lc)$/.test(submodule.name)) {
147 | // for aws-lc, we may use a branch for FIPS support.
148 | const isOnFIPS = await isAncestor(diff.thisCommit, 'origin/fips-2024-09-27', submodule.path);
149 | if (!isOnFIPS) {
150 | core.setFailed(`Submodule ${submodule.name} is using a branch`);
151 | return;
152 | }
153 | } else {
154 | core.setFailed(`Submodule ${submodule.name} is using a branch`);
155 | return;
156 | }
157 | }
158 | } else {
159 | core.setFailed(`Submodule ${submodule.name} is not using a tagged release`);
160 | return;
161 | }
162 | }
163 | 
164 | // prefer to use tags for further operations since they're easier to grok than commit hashes
165 | const mainTag = await getReleaseTag(diff.mainCommit, submodule.path);
166 | const thisCommit = thisTag || diff.thisCommit;
167 | const mainCommit = mainTag || diff.mainCommit;
168 | 
169 | // freak out if our branch's submodule is older than where we're merging
// thisCommit being an ancestor of mainCommit means main has moved past us,
// i.e. this branch would roll the submodule back
170 | if (await isAncestor(thisCommit, mainCommit, submodule.path)) {
171 | core.setFailed(`Submodule ${submodule.name} is newer on origin/main:`
172 | + ` ${mainCommit} vs ${thisCommit} on this branch`);
173 | return;
174 | }
175 | 
176 | }
177 | }
178 | 
// Entry point: any exception escaping the check fails the action cleanly.
179 | const main = async function () {
180 | try {
181 | await checkSubmodules();
182 | } catch (error) {
183 | core.setFailed(error.message);
184 | }
185 | }
186 | 
187 | main()
188 | -------------------------------------------------------------------------------- /.github/actions/check-submodules/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "check-submodules", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 |
"requires": true, 6 | "packages": { 7 | "": { 8 | "name": "check-submodules", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@actions/core": "^1.11.1", 13 | "@actions/exec": "^1.1.1" 14 | }, 15 | "devDependencies": { 16 | "@vercel/ncc": "^0.38.1" 17 | } 18 | }, 19 | "node_modules/@actions/core": { 20 | "version": "1.11.1", 21 | "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", 22 | "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", 23 | "license": "MIT", 24 | "dependencies": { 25 | "@actions/exec": "^1.1.1", 26 | "@actions/http-client": "^2.0.1" 27 | } 28 | }, 29 | "node_modules/@actions/exec": { 30 | "version": "1.1.1", 31 | "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", 32 | "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", 33 | "license": "MIT", 34 | "dependencies": { 35 | "@actions/io": "^1.0.1" 36 | } 37 | }, 38 | "node_modules/@actions/http-client": { 39 | "version": "2.2.3", 40 | "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", 41 | "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", 42 | "license": "MIT", 43 | "dependencies": { 44 | "tunnel": "^0.0.6", 45 | "undici": "^5.25.4" 46 | } 47 | }, 48 | "node_modules/@actions/io": { 49 | "version": "1.1.3", 50 | "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", 51 | "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", 52 | "license": "MIT" 53 | }, 54 | "node_modules/@fastify/busboy": { 55 | "version": "2.1.1", 56 | "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", 57 | "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", 58 | "license": "MIT", 59 | "engines": 
{ 60 | "node": ">=14" 61 | } 62 | }, 63 | "node_modules/@vercel/ncc": { 64 | "version": "0.38.3", 65 | "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz", 66 | "integrity": "sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==", 67 | "dev": true, 68 | "license": "MIT", 69 | "bin": { 70 | "ncc": "dist/ncc/cli.js" 71 | } 72 | }, 73 | "node_modules/tunnel": { 74 | "version": "0.0.6", 75 | "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", 76 | "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", 77 | "license": "MIT", 78 | "engines": { 79 | "node": ">=0.6.11 <=0.7.0 || >=0.7.3" 80 | } 81 | }, 82 | "node_modules/undici": { 83 | "version": "5.28.5", 84 | "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.5.tgz", 85 | "integrity": "sha512-zICwjrDrcrUE0pyyJc1I2QzBkLM8FINsgOrt6WjA+BgajVq9Nxu2PbFFXUrAggLfDXlZGZBVZYw7WNV5KiBiBA==", 86 | "license": "MIT", 87 | "dependencies": { 88 | "@fastify/busboy": "^2.0.0" 89 | }, 90 | "engines": { 91 | "node": ">=14.0" 92 | } 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /.github/actions/check-submodules/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "check-submodules", 3 | "version": "1.0.0", 4 | "description": "Check submodule commits (ex: avoid accidental rollback)", 5 | "main": "index.js", 6 | "scripts": { 7 | "prepare": "ncc build index.js" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "@actions/core": "^1.11.1", 14 | "@actions/exec": "^1.1.1" 15 | }, 16 | "devDependencies": { 17 | "@vercel/ncc": "^0.38.1" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /.github/actions/release-tag/action.yml: 
-------------------------------------------------------------------------------- 1 | name: 'Release Tag' 2 | description: 'Exports either the release tag or branch name as release_tag' 3 | inputs: 4 | output: 5 | description: 'Path to write release_tag to' 6 | outputs: 7 | release_tag: 8 | description: 'Either the release tag or branch name' 9 | 10 | runs: 11 | using: 'node16' 12 | main: 'packed/index.js' 13 | -------------------------------------------------------------------------------- /.github/actions/release-tag/index.js: -------------------------------------------------------------------------------- 1 | const core = require('@actions/core'); 2 | const fs = require('fs'); 3 | 4 | try { 5 | const github_ref = process.env.GITHUB_REF; 6 | const parts = github_ref.split('/'); 7 | const branch = parts[parts.length - 1]; 8 | var release_tag = branch; 9 | // GITHUB_REF can be refs/pull//merge for PR openings 10 | const branches = ['main', 'master', 'merge'] 11 | if (branches.includes(branch)) { 12 | const spawnSync = require('child_process').spawnSync; 13 | const result = spawnSync('git', ['describe', '--abbrev=0'], { 14 | timeout: 2000 15 | }); 16 | if (result.status == 0) { 17 | release_tag = result.stdout.trim(); 18 | } 19 | } 20 | 21 | core.setOutput('release_tag', release_tag); 22 | const output_path = core.getInput('output'); 23 | if (output_path) { 24 | fs.writeFileSync(output_path, release_tag); 25 | } 26 | } catch (error) { 27 | core.setFailed(error.message); 28 | } 29 | -------------------------------------------------------------------------------- /.github/actions/release-tag/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "release-tag", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "release-tag", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@actions/core": "^1.10.1" 13 | }, 14 | 
"devDependencies": { 15 | "@vercel/ncc": "^0.38.1" 16 | } 17 | }, 18 | "node_modules/@actions/core": { 19 | "version": "1.11.1", 20 | "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", 21 | "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", 22 | "license": "MIT", 23 | "dependencies": { 24 | "@actions/exec": "^1.1.1", 25 | "@actions/http-client": "^2.0.1" 26 | } 27 | }, 28 | "node_modules/@actions/exec": { 29 | "version": "1.1.1", 30 | "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", 31 | "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", 32 | "license": "MIT", 33 | "dependencies": { 34 | "@actions/io": "^1.0.1" 35 | } 36 | }, 37 | "node_modules/@actions/http-client": { 38 | "version": "2.2.3", 39 | "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", 40 | "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", 41 | "license": "MIT", 42 | "dependencies": { 43 | "tunnel": "^0.0.6", 44 | "undici": "^5.25.4" 45 | } 46 | }, 47 | "node_modules/@actions/io": { 48 | "version": "1.1.3", 49 | "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", 50 | "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", 51 | "license": "MIT" 52 | }, 53 | "node_modules/@fastify/busboy": { 54 | "version": "2.1.1", 55 | "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", 56 | "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", 57 | "license": "MIT", 58 | "engines": { 59 | "node": ">=14" 60 | } 61 | }, 62 | "node_modules/@vercel/ncc": { 63 | "version": "0.38.3", 64 | "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz", 65 | "integrity": 
"sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==", 66 | "dev": true, 67 | "license": "MIT", 68 | "bin": { 69 | "ncc": "dist/ncc/cli.js" 70 | } 71 | }, 72 | "node_modules/tunnel": { 73 | "version": "0.0.6", 74 | "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", 75 | "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", 76 | "license": "MIT", 77 | "engines": { 78 | "node": ">=0.6.11 <=0.7.0 || >=0.7.3" 79 | } 80 | }, 81 | "node_modules/undici": { 82 | "version": "5.28.5", 83 | "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.5.tgz", 84 | "integrity": "sha512-zICwjrDrcrUE0pyyJc1I2QzBkLM8FINsgOrt6WjA+BgajVq9Nxu2PbFFXUrAggLfDXlZGZBVZYw7WNV5KiBiBA==", 85 | "license": "MIT", 86 | "dependencies": { 87 | "@fastify/busboy": "^2.0.0" 88 | }, 89 | "engines": { 90 | "node": ">=14.0" 91 | } 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /.github/actions/release-tag/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "release-tag", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "keywords": [], 7 | "author": "", 8 | "license": "ISC", 9 | "scripts": { 10 | "prepare": "ncc build index.js --out packed" 11 | }, 12 | "dependencies": { 13 | "@actions/core": "^1.10.1" 14 | }, 15 | "devDependencies": { 16 | "@vercel/ncc": "^0.38.1" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Please see the documentation for all configuration options: 2 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 3 | 4 | version: 2 5 | updates: 6 | 7 | - package-ecosystem: "github-actions" 8 | directory: "/" 9 | 
schedule: 10 | interval: "daily" 11 | 12 | - package-ecosystem: "npm" 13 | directory: ".github/actions/check-submodules" 14 | schedule: 15 | interval: "daily" 16 | 17 | - package-ecosystem: "npm" 18 | directory: ".github/actions/release-tag" 19 | schedule: 20 | interval: "daily" 21 | 22 | - package-ecosystem: "pip" 23 | directory: "/" 24 | schedule: 25 | interval: "daily" 26 | -------------------------------------------------------------------------------- /.github/docker-images/al2-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amazonlinux:2 2 | 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | ############################################################################### 7 | RUN yum -y update \ 8 | && yum -y install \ 9 | tar \ 10 | git \ 11 | sudo \ 12 | # Python 13 | python3 \ 14 | python3-devel \ 15 | python3-pip \ 16 | make \ 17 | gcc \ 18 | gcc-c++ \ 19 | which \ 20 | && yum clean all \ 21 | && rm -rf /var/cache/yum 22 | 23 | ############################################################################### 24 | # Python/AWS CLI 25 | ############################################################################### 26 | RUN python3 -m pip install setuptools virtualenv \ 27 | && python3 -m pip install --upgrade awscli \ 28 | && aws --version 29 | 30 | ############################################################################### 31 | # Install pre-built CMake 32 | ############################################################################### 33 | WORKDIR /tmp 34 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 35 | && tar xvzf cmake.tar.gz -C /usr/local \ 36 | && cmake --version \ 37 | && ctest --version \ 38 | && rm -f /tmp/cmake.tar.gz 39 | 40 | ############################################################################### 41 | # Install entrypoint 42 | 
############################################################################### 43 | ADD entrypoint.sh /usr/local/bin/builder 44 | RUN chmod a+x /usr/local/bin/builder 45 | ENTRYPOINT ["/usr/local/bin/builder"] 46 | -------------------------------------------------------------------------------- /.github/docker-images/al2012-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amazonlinux:1 2 | 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | ############################################################################### 7 | RUN yum -y update \ 8 | && yum -y install \ 9 | tar \ 10 | git \ 11 | curl \ 12 | sudo \ 13 | # Python 14 | python36 \ 15 | python36-devel \ 16 | python36-pip \ 17 | make \ 18 | cmake3 \ 19 | gcc72 \ 20 | gcc72-c++ \ 21 | && yum clean all \ 22 | && rm -rf /var/cache/yum \ 23 | && ln -s /usr/bin/cmake3 /usr/bin/cmake \ 24 | && ln -s /usr/bin/ctest3 /usr/bin/ctest \ 25 | && cmake --version \ 26 | && ctest --version 27 | 28 | ############################################################################### 29 | # Python/AWS CLI 30 | ############################################################################### 31 | RUN python3 -m pip install --upgrade setuptools virtualenv \ 32 | && python3 -m pip install --upgrade awscli \ 33 | && aws --version 34 | 35 | ############################################################################### 36 | # Install pre-built CMake 37 | ############################################################################### 38 | WORKDIR /tmp 39 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 40 | && tar xvzf cmake.tar.gz -C /usr/local \ 41 | && cmake --version \ 42 | && rm -f /tmp/cmake.tar.gz 43 | 44 | ############################################################################### 45 | # Install entrypoint 46 | 
############################################################################### 47 | ADD entrypoint.sh /usr/local/bin/builder 48 | RUN chmod a+x /usr/local/bin/builder 49 | ENTRYPOINT ["/usr/local/bin/builder"] 50 | -------------------------------------------------------------------------------- /.github/docker-images/al2023-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amazonlinux:2023 2 | 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | ############################################################################### 7 | RUN yum -y update \ 8 | && yum -y install \ 9 | tar \ 10 | git \ 11 | sudo \ 12 | # Python 13 | python3 \ 14 | python3-devel \ 15 | python3-pip \ 16 | make \ 17 | gcc \ 18 | gcc-c++ \ 19 | which \ 20 | && yum clean all \ 21 | && rm -rf /var/cache/yum 22 | 23 | ############################################################################### 24 | # Python/AWS CLI 25 | ############################################################################### 26 | RUN python3 -m pip install setuptools virtualenv \ 27 | && python3 -m pip install --upgrade awscli \ 28 | && aws --version 29 | 30 | ############################################################################### 31 | # Install pre-built CMake 32 | ############################################################################### 33 | WORKDIR /tmp 34 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 35 | && tar xvzf cmake.tar.gz -C /usr/local \ 36 | && cmake --version \ 37 | && ctest --version \ 38 | && rm -f /tmp/cmake.tar.gz 39 | 40 | ############################################################################### 41 | # Install entrypoint 42 | ############################################################################### 43 | ADD entrypoint.sh /usr/local/bin/builder 44 | RUN chmod a+x /usr/local/bin/builder 45 | 
ENTRYPOINT ["/usr/local/bin/builder"] 46 | -------------------------------------------------------------------------------- /.github/docker-images/alpine-3.16-arm64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM arm64v8/alpine:3.16.2 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN apk update 7 | RUN apk add \ 8 | git \ 9 | curl \ 10 | sudo \ 11 | unzip \ 12 | # Python 13 | python3 \ 14 | python3-dev \ 15 | py3-pip \ 16 | build-base \ 17 | linux-headers \ 18 | ca-certificates \ 19 | cmake \ 20 | bash \ 21 | aws-cli \ 22 | perl-strictures 23 | 24 | WORKDIR /tmp 25 | 26 | ############################################################################### 27 | # Install entrypoint 28 | ############################################################################### 29 | ADD entrypoint.sh /usr/local/bin/builder 30 | RUN chmod a+x /usr/local/bin/builder 31 | ENTRYPOINT ["/usr/local/bin/builder"] 32 | -------------------------------------------------------------------------------- /.github/docker-images/alpine-3.16-armv6/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM arm32v6/alpine:3.16.2 2 | 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | ############################################################################### 7 | RUN apk update 8 | RUN apk add \ 9 | git \ 10 | curl \ 11 | sudo \ 12 | unzip \ 13 | # Python 14 | python3 \ 15 | python3-dev \ 16 | py3-pip \ 17 | build-base \ 18 | linux-headers \ 19 | ca-certificates \ 20 | cmake \ 21 | bash \ 22 | aws-cli 23 | 24 | WORKDIR /tmp 25 | 26 | ############################################################################### 27 | # Install entrypoint 28 | 
############################################################################### 29 | ADD entrypoint.sh /usr/local/bin/builder 30 | RUN chmod a+x /usr/local/bin/builder 31 | ENTRYPOINT ["/usr/local/bin/builder"] 32 | -------------------------------------------------------------------------------- /.github/docker-images/alpine-3.16-armv7/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM arm32v7/alpine:3.16.2 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN apk update 7 | RUN apk add \ 8 | git \ 9 | curl \ 10 | sudo \ 11 | unzip \ 12 | # Python 13 | python3 \ 14 | python3-dev \ 15 | py3-pip \ 16 | build-base \ 17 | linux-headers \ 18 | ca-certificates \ 19 | cmake \ 20 | bash \ 21 | aws-cli 22 | 23 | WORKDIR /tmp 24 | 25 | ############################################################################### 26 | # Install entrypoint 27 | ############################################################################### 28 | ADD entrypoint.sh /usr/local/bin/builder 29 | RUN chmod a+x /usr/local/bin/builder 30 | ENTRYPOINT ["/usr/local/bin/builder"] 31 | -------------------------------------------------------------------------------- /.github/docker-images/alpine-3.16-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amd64/alpine:3.16.2 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN apk update 7 | RUN apk add \ 8 | git \ 9 | curl \ 10 | sudo \ 11 | unzip \ 12 | # Python 13 | python3 \ 14 | python3-dev \ 15 | py3-pip \ 16 | build-base \ 17 | linux-headers \ 18 | ca-certificates \ 19 | cmake \ 20 | bash \ 21 | aws-cli \ 22 | perl-strictures 23 | 24 | WORKDIR /tmp 25 | 26 | 
############################################################################### 27 | # Install entrypoint 28 | ############################################################################### 29 | ADD entrypoint.sh /usr/local/bin/builder 30 | RUN chmod a+x /usr/local/bin/builder 31 | ENTRYPOINT ["/usr/local/bin/builder"] 32 | -------------------------------------------------------------------------------- /.github/docker-images/alpine-3.16-x86/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM i386/alpine:3.16.2 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN apk update 7 | RUN apk add \ 8 | git \ 9 | curl \ 10 | sudo \ 11 | unzip \ 12 | # Python 13 | python3 \ 14 | python3-dev \ 15 | py3-pip \ 16 | build-base \ 17 | linux-headers \ 18 | ca-certificates \ 19 | cmake \ 20 | bash \ 21 | aws-cli 22 | 23 | WORKDIR /tmp 24 | 25 | ############################################################################### 26 | # Install entrypoint 27 | ############################################################################### 28 | ADD entrypoint.sh /usr/local/bin/builder 29 | RUN chmod a+x /usr/local/bin/builder 30 | ENTRYPOINT ["/usr/local/bin/builder"] 31 | -------------------------------------------------------------------------------- /.github/docker-images/build_cmake.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This builds CMake in the specified container, and uploads the result to S3 for use in building future containers 4 | 5 | set -ex 6 | 7 | [ $# -eq 3 ] 8 | variant=$1 9 | arch=$2 10 | version=$3 11 | 12 | # 3.13.5 is the last version to work with ancient glibc 13 | CMAKE_VERSION=3.13.5 14 | CMAKE_INSTALL_VERSION=3.13 15 | 16 | # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be in env vars to 
pass to container 17 | [ ! -z "$AWS_ACCESS_KEY_ID" ] && [ ! -z "$AWS_SECRET_ACCESS_KEY" ] 18 | 19 | if [ ! -e /tmp/aws-crt-${variant}-${arch}.tar.gz ]; then 20 | aws s3 cp s3://aws-crt-builder/releases/${version}/aws-crt-${variant}-${arch}.tar.gz /tmp 21 | docker load < /tmp/aws-crt-${variant}-${arch}.tar.gz 22 | fi 23 | 24 | container=$(docker run -dit --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint /bin/sh docker.pkg.github.com/awslabs/aws-crt-builder/aws-crt-${variant}-${arch}:${version}) 25 | docker exec ${container} sh -c "cd /tmp && curl -LO https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}.tar.gz" 26 | docker exec ${container} sh -c "cd /tmp && tar xzf cmake-${CMAKE_VERSION}.tar.gz && cd cmake-${CMAKE_VERSION} && ./bootstrap -- -DCMAKE_BUILD_TYPE=Release" 27 | docker exec ${container} sh -c "cd /tmp/cmake-${CMAKE_VERSION} && make -j 4 && make install" 28 | docker exec ${container} sh -c "tar czf /tmp/cmake-${CMAKE_VERSION}-${variant}-${arch}.tar.gz -C /usr/local share/cmake-${CMAKE_INSTALL_VERSION} bin/cmake bin/ctest bin/cpack doc/cmake-${CMAKE_INSTALL_VERSION}" 29 | docker exec ${container} sh -c "aws s3 cp /tmp/cmake-${CMAKE_VERSION}-${variant}-${arch}.tar.gz s3://aws-crt-builder/_binaries/cmake/cmake-${CMAKE_VERSION}-${variant}-${arch}.tar.gz" 30 | docker stop ${container} 31 | -------------------------------------------------------------------------------- /.github/docker-images/cache_maven.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This clones aws-crt-java in the specified container, builds it with maven, then caches maven's respository to S3 4 | 5 | set -ex 6 | 7 | [ $# -ge 3 ] 8 | variant=$1 9 | arch=$2 10 | version=$3 11 | 12 | crt_java_branch=main 13 | if [ $# -gt 3 ]; then 14 | crt_java_branch=$4 15 | fi 16 | 17 | # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be in env vars to pass to container 18 | [ ! 
-z "$AWS_ACCESS_KEY_ID" ] && [ ! -z "$AWS_SECRET_ACCESS_KEY" ] 19 | 20 | if [ ! -e /tmp/aws-crt-${variant}-${arch}.tar.gz ]; then 21 | aws s3 cp s3://aws-crt-builder/_docker/aws-crt-${variant}-${arch}.tar.gz /tmp 22 | docker load < /tmp/aws-crt-${variant}-${arch}.tar.gz 23 | fi 24 | 25 | container=$(docker run -dit --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint /bin/sh docker.pkg.github.com/awslabs/aws-crt-builder/aws-crt-${variant}-${arch}:${version}) 26 | docker exec ${container} sh -c "cd /tmp && builder build -p aws-crt-java --branch=${crt_java_branch} || true" 27 | docker exec ${container} sh -c "tar cvzf /tmp/maven-${variant}-${arch}.tar.gz -C /root/.m2 ." 28 | docker exec ${container} sh -c "aws s3 cp /tmp/maven-${variant}-${arch}.tar.gz s3://aws-crt-builder/_binaries/maven/maven-${variant}-${arch}.tar.gz" 29 | docker stop ${container} 30 | -------------------------------------------------------------------------------- /.github/docker-images/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | args=("$@") 6 | 7 | # allow a bashrc to be filled out in the docker image and then run by this script as the entrypoint 8 | if [ -e $HOME/.bashrc ]; then 9 | source $HOME/.bashrc 10 | fi 11 | 12 | version=LATEST 13 | if [[ "${args[0]}" == "--version="* ]]; then 14 | version=${args[0]} 15 | version=$(echo $version | cut -f2 -d=) 16 | args=(${args[@]:1}) 17 | fi 18 | 19 | if [ $(echo $version | grep -E '^v[0-9\.]+$') ]; then 20 | version=releases/$version 21 | elif [[ $version != 'channels/'* ]]; then 22 | version=channels/$version 23 | fi 24 | 25 | # download the version of builder requested 26 | curl -sSL -o /usr/local/bin/builder.pyz --retry 3 https://d19elf31gohf1l.cloudfront.net/${version}/builder.pyz?date=`date +%s` 27 | builder=/usr/local/bin/builder.pyz 28 | chmod a+x $builder 29 | 30 | # on manylinux, use the latest python3 via symlink 31 | if [ -x 
/opt/python/cp39-cp39/bin/python ] && [ ! -e /usr/local/bin/python3 ]; then 32 | ln -s /opt/python/cp39-cp39/bin/python /usr/local/bin/python3 33 | fi 34 | 35 | # Figure out where to work based on environment, default to . 36 | if [ -d "$GITHUB_WORKSPACE" ]; then 37 | cd $GITHUB_WORKSPACE 38 | elif [ -d "$CODEBUILD_SRC_DIR" ]; then 39 | cd $CODEBUILD_SRC_DIR 40 | else 41 | cd ~ 42 | fi 43 | 44 | # Launch the builder with whatever args were passed to this script 45 | $builder ${args[@]} 46 | -------------------------------------------------------------------------------- /.github/docker-images/fedora-34-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amd64/fedora:34 2 | 3 | SHELL ["/bin/bash", "-c"] 4 | 5 | RUN dnf update -y 6 | RUN dnf install -y make gcc gcc-c++ git cmake curl wget python3 python3-pip python3-devel findutils 7 | 8 | ############################################################################### 9 | # Python/AWS CLI 10 | ############################################################################### 11 | RUN python3 -m pip install --upgrade setuptools virtualenv \ 12 | && python3 -m pip install --upgrade awscli \ 13 | && aws --version 14 | 15 | ############################################################################### 16 | # Install entrypoint 17 | ############################################################################### 18 | ADD entrypoint.sh /usr/local/bin/builder 19 | RUN chmod a+x /usr/local/bin/builder 20 | ENTRYPOINT ["/usr/local/bin/builder"] 21 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux1-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux FROM docker.pkg.github.com/awslabs/aws-crt-python/manylinux1:x64 2 | # and: https://github.com/pypa/python-manylinux-demo 3 | FROM quay.io/pypa/manylinux1_x86_64 4 | 5 | 
############################################################################### 6 | # Basics 7 | ############################################################################### 8 | RUN yum -y update \ 9 | && yum -y install sudo \ 10 | && yum clean all 11 | 12 | ############################################################################### 13 | # Python/AWS CLI 14 | ############################################################################### 15 | RUN /opt/python/cp37-cp37m/bin/python -m pip install --upgrade setuptools virtualenv \ 16 | && /opt/python/cp37-cp37m/bin/python -m pip install --upgrade awscli \ 17 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 18 | && which aws \ 19 | && aws --version 20 | 21 | ############################################################################### 22 | # Install pre-built CMake 23 | ############################################################################### 24 | WORKDIR /tmp 25 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 26 | && tar xvzf cmake.tar.gz -C /usr/local \ 27 | && cmake --version \ 28 | && rm -f /tmp/cmake.tar.gz 29 | 30 | ############################################################################### 31 | # Install entrypoint 32 | ############################################################################### 33 | ADD entrypoint.sh /usr/local/bin/builder 34 | RUN chmod a+x /usr/local/bin/builder 35 | ENTRYPOINT ["/usr/local/bin/builder"] 36 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux1-x86/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux FROM docker.pkg.github.com/awslabs/aws-crt-python/manylinux1:x86 2 | # and: quay.io/pypa/manylinux1_x86_64 3 | FROM quay.io/pypa/manylinux1_i686 4 | 5 | ############################################################################### 6 | 
# Basics 7 | ############################################################################### 8 | RUN yum -y update \ 9 | && yum -y install sudo \ 10 | && yum clean all 11 | 12 | ############################################################################### 13 | # Python/AWS CLI 14 | ############################################################################### 15 | RUN /opt/python/cp37-cp37m/bin/python -m pip install --upgrade setuptools virtualenv \ 16 | && /opt/python/cp37-cp37m/bin/python -m pip install --upgrade awscli \ 17 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 18 | && which aws \ 19 | && aws --version 20 | 21 | ############################################################################### 22 | # Install pre-built CMake 23 | ############################################################################### 24 | WORKDIR /tmp 25 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x86.tar.gz -o cmake.tar.gz \ 26 | && tar xvzf cmake.tar.gz -C /usr/local \ 27 | && cmake --version \ 28 | && rm -f /tmp/cmake.tar.gz 29 | 30 | ############################################################################### 31 | # Install entrypoint 32 | ############################################################################### 33 | ADD entrypoint.sh /usr/local/bin/builder 34 | RUN chmod a+x /usr/local/bin/builder 35 | ENTRYPOINT ["/usr/local/bin/builder"] 36 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux2014-aarch64/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux 2 | # and: https://github.com/pypa/python-manylinux-demo 3 | FROM quay.io/pypa/manylinux2014_aarch64 4 | 5 | ############################################################################### 6 | # Basics 7 | ############################################################################### 8 | RUN yum -y install sudo 
\ 9 | && yum clean all \ 10 | && cmake --version \ 11 | && ctest --version 12 | 13 | ############################################################################### 14 | # Python/AWS CLI 15 | ############################################################################### 16 | RUN /opt/python/cp39-cp39/bin/python -m pip install --upgrade setuptools virtualenv \ 17 | && /opt/python/cp39-cp39/bin/python -m pip install --upgrade awscli \ 18 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 19 | && which aws \ 20 | && aws --version 21 | 22 | ############################################################################### 23 | # nodejs/npm 24 | ############################################################################### 25 | RUN curl -sL https://rpm.nodesource.com/setup_14.x | sudo bash - 26 | RUN sudo yum -y install nodejs && node --version 27 | 28 | ############################################################################### 29 | # Install entrypoint 30 | ############################################################################### 31 | ADD entrypoint.sh /usr/local/bin/builder 32 | RUN chmod a+x /usr/local/bin/builder 33 | ENTRYPOINT ["/usr/local/bin/builder"] 34 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux2014-armv7l/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux 2 | # and: https://github.com/pypa/python-manylinux-demo 3 | FROM quay.io/pypa/manylinux2014_armv7l 4 | 5 | ############################################################################### 6 | # Basics 7 | ############################################################################### 8 | RUN yum -y install sudo \ 9 | && yum clean all \ 10 | && cmake --version \ 11 | && ctest --version 12 | 13 | ############################################################################### 14 | # Python/AWS CLI 15 | 
############################################################################### 16 | RUN /opt/python/cp39-cp39/bin/python -m pip install --upgrade setuptools virtualenv \ 17 | && /opt/python/cp39-cp39/bin/python -m pip install --upgrade awscli \ 18 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 19 | && which aws \ 20 | && aws --version 21 | 22 | ############################################################################### 23 | # Install entrypoint 24 | ############################################################################### 25 | ADD entrypoint.sh /usr/local/bin/builder 26 | RUN chmod a+x /usr/local/bin/builder 27 | ENTRYPOINT ["/usr/local/bin/builder"] 28 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux2014-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux 2 | # and: https://github.com/pypa/python-manylinux-demo 3 | FROM quay.io/pypa/manylinux2014_x86_64 4 | 5 | ############################################################################### 6 | # Basics 7 | ############################################################################### 8 | RUN yum -y install sudo cmake3 \ 9 | # used in java release pipeline 10 | maven \ 11 | && yum clean all \ 12 | && ln -s `which cmake3` /usr/bin/cmake \ 13 | && ln -s `which ctest3` /usr/bin/ctest \ 14 | && cmake --version \ 15 | && ctest --version 16 | 17 | ############################################################################### 18 | # Python/AWS CLI 19 | ############################################################################### 20 | RUN /opt/python/cp37-cp37m/bin/python -m pip install --upgrade setuptools virtualenv \ 21 | && /opt/python/cp37-cp37m/bin/python -m pip install --upgrade awscli \ 22 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 23 | && which aws \ 24 | && aws --version 25 | 26 | 
############################################################################### 27 | # Install pre-built CMake 28 | ############################################################################### 29 | WORKDIR /tmp 30 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 31 | && tar xvzf cmake.tar.gz -C /usr/local \ 32 | && cmake --version \ 33 | && rm -f /tmp/cmake.tar.gz 34 | 35 | ############################################################################### 36 | # Install entrypoint 37 | ############################################################################### 38 | ADD entrypoint.sh /usr/local/bin/builder 39 | RUN chmod a+x /usr/local/bin/builder 40 | ENTRYPOINT ["/usr/local/bin/builder"] 41 | -------------------------------------------------------------------------------- /.github/docker-images/manylinux2014-x86/Dockerfile: -------------------------------------------------------------------------------- 1 | # See: https://github.com/pypa/manylinux 2 | # and: https://github.com/pypa/python-manylinux-demo 3 | FROM quay.io/pypa/manylinux2014_i686 4 | 5 | ############################################################################### 6 | # Basics 7 | ############################################################################### 8 | RUN yum -y install sudo \ 9 | # used in java release pipeline 10 | maven \ 11 | && yum clean all 12 | 13 | ############################################################################### 14 | # Python/AWS CLI 15 | ############################################################################### 16 | RUN /opt/python/cp37-cp37m/bin/python -m pip install --upgrade setuptools virtualenv \ 17 | && /opt/python/cp37-cp37m/bin/python -m pip install --upgrade awscli \ 18 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 19 | && which aws \ 20 | && aws --version 21 | 22 | ############################################################################### 23 | # Install 
pre-built CMake 24 | ############################################################################### 25 | WORKDIR /tmp 26 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux2014-x86.tar.gz -o cmake.tar.gz \ 27 | && tar xvzf cmake.tar.gz -C /usr/local \ 28 | && cmake --version \ 29 | && rm -f /tmp/cmake.tar.gz 30 | 31 | ############################################################################### 32 | # Install entrypoint 33 | ############################################################################### 34 | ADD entrypoint.sh /usr/local/bin/builder 35 | RUN chmod a+x /usr/local/bin/builder 36 | ENTRYPOINT ["/usr/local/bin/builder"] 37 | -------------------------------------------------------------------------------- /.github/docker-images/musllinux-1-1-aarch64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM quay.io/pypa/musllinux_1_1_aarch64 2 | 3 | ############################################################################### 4 | # Basics 5 | ############################################################################### 6 | RUN apk update && apk add sudo py3-pip 7 | 8 | ################################################################################ 9 | ## Python/AWS CLI 10 | ################################################################################ 11 | WORKDIR /tmp 12 | RUN python3 -m pip install awscli \ 13 | && aws --version 14 | 15 | ################################################################################# 16 | ### Install entrypoint 17 | ################################################################################# 18 | ADD entrypoint.sh /usr/local/bin/builder 19 | RUN chmod a+x /usr/local/bin/builder 20 | ENTRYPOINT ["/usr/local/bin/builder"] 21 | -------------------------------------------------------------------------------- /.github/docker-images/musllinux-1-1-x64/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM quay.io/pypa/musllinux_1_1_x86_64 2 | 3 | ############################################################################### 4 | # Basics 5 | ############################################################################### 6 | RUN apk update && apk add sudo py3-pip 7 | 8 | ################################################################################ 9 | ## Python/AWS CLI 10 | ################################################################################ 11 | WORKDIR /tmp 12 | RUN python3 -m pip install awscli \ 13 | && aws --version 14 | 15 | ################################################################################# 16 | ### Install entrypoint 17 | ################################################################################# 18 | ADD entrypoint.sh /usr/local/bin/builder 19 | RUN chmod a+x /usr/local/bin/builder 20 | ENTRYPOINT ["/usr/local/bin/builder"] 21 | -------------------------------------------------------------------------------- /.github/docker-images/node-10-linux-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN yum -y update \ 7 | && yum -y install \ 8 | tar \ 9 | git \ 10 | curl \ 11 | sudo \ 12 | # Python 13 | python3 \ 14 | python3-devel \ 15 | python3-pip \ 16 | make \ 17 | gcc \ 18 | gcc-c++ \ 19 | && yum clean all \ 20 | && rm -rf /var/cache/yum 21 | 22 | ############################################################################### 23 | # Python/AWS CLI 24 | ############################################################################### 25 | RUN python3 -m pip install --upgrade setuptools virtualenv \ 26 | && python3 -m pip install --upgrade awscli \ 27 | && aws --version 28 | 29 | 
############################################################################### 30 | # Install pre-built CMake 31 | ############################################################################### 32 | WORKDIR /tmp 33 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13.5-node-10-linux-x64.tar.gz -o cmake.tar.gz \ 34 | && tar xvzf cmake.tar.gz -C /usr/local \ 35 | && cmake --version \ 36 | && ctest --version \ 37 | && rm -f /tmp/cmake.tar.gz 38 | 39 | ############################################################################### 40 | # Install entrypoint 41 | ############################################################################### 42 | ADD entrypoint.sh /usr/local/bin/builder 43 | RUN chmod a+x /usr/local/bin/builder 44 | ENTRYPOINT ["/usr/local/bin/builder"] 45 | -------------------------------------------------------------------------------- /.github/docker-images/opensuse-leap/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM opensuse/leap:15.3 2 | 3 | SHELL ["/bin/bash", "-c"] 4 | 5 | RUN zypper refresh 6 | RUN zypper install -y git gcc gcc-c++ cmake curl python39-devel python39-pip wget sudo tar gzip 7 | 8 | # normally we let the builder install this, but the builder does a non-standard install that doesn't play nice 9 | # with opensuse's alternatives framework. So just install the jdk 11 development package in the base container 10 | # and use it for this particular CI check. 
11 | RUN zypper install -y java-11-openjdk-devel 12 | 13 | ############################################################################### 14 | # Python/AWS CLI 15 | ############################################################################### 16 | RUN python3.9 -m pip install --upgrade setuptools virtualenv \ 17 | && python3.9 -m pip install --upgrade awscli \ 18 | && aws --version 19 | 20 | ############################################################################### 21 | # Install entrypoint 22 | ############################################################################### 23 | ADD entrypoint.sh /usr/local/bin/builder 24 | RUN chmod a+x /usr/local/bin/builder 25 | ENTRYPOINT ["/usr/local/bin/builder"] 26 | -------------------------------------------------------------------------------- /.github/docker-images/openwrt-x64-openjdk8/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openwrt/rootfs:x86-64-v23.05.3 2 | 3 | # for some reason this directory isn't created by this point and we need it 4 | RUN mkdir -p /var/lock 5 | RUN mkdir -p /usr/local/bin 6 | 7 | RUN opkg update 8 | 9 | # packages in openwrt 10 | RUN opkg install git-http ca-bundle curl python3 python3-pip gcc make bash sudo perl 11 | 12 | # packages we have to get from alpine 13 | 14 | # first, pull apk directly 15 | RUN wget http://dl-cdn.alpinelinux.org/alpine/v3.16/main/x86_64/apk-tools-static-2.12.9-r3.apk 16 | RUN tar -xzf apk-tools-static-2.12.9-r3.apk 17 | 18 | # next, install stuff that either 19 | # (1) isn't in openwrt and we don't want to build ourselves (cmake, maven) 20 | # (2) appears to be broken (python3-awscli) 21 | # 22 | # Under normal circumstances, we let the repository (via builder.json and custom actions) guide the language runtime installation, but since 23 | # (1) openwrt does not have many runtimes in its package manager (and tools like maven that depend on those runtimes) 24 | # (2) we are only doing openwrt CI checks 
for Java 25 | # we install the desired JDK here as well. 26 | # 27 | RUN ./sbin/apk.static -X http://dl-cdn.alpinelinux.org/alpine/v3.16/main -X http://dl-cdn.alpinelinux.org/alpine/v3.16/community -U --allow-untrusted --initdb add cmake openjdk8 maven aws-cli 28 | 29 | # stub libraries for stuff we unconditionally link; functionality is all actually in musl already 30 | # long term we might want to make our recognition better, but this is a blocker for the s2n build 31 | RUN ar -rc /usr/lib/libpthread.a 32 | RUN ar -rc /usr/lib/libdl.a 33 | RUN ar -rc /usr/lib/librt.a 34 | RUN ar -rc /usr/lib/libm.a 35 | 36 | ############################################################################### 37 | # Install entrypoint 38 | ############################################################################### 39 | ADD entrypoint.sh /usr/local/bin/builder 40 | RUN chmod a+x /usr/local/bin/builder 41 | ENTRYPOINT ["/usr/local/bin/builder"] -------------------------------------------------------------------------------- /.github/docker-images/raspbian-bullseye/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=linux/arm/v7 dtcooper/raspberrypi-os:bullseye 2 | 3 | ############################################################################### 4 | # Install prereqs 5 | ############################################################################### 6 | RUN apt-get update -qq \ 7 | && apt-get -y install \ 8 | git \ 9 | curl \ 10 | sudo \ 11 | unzip \ 12 | clang \ 13 | cmake \ 14 | # Python 15 | python3.9 \ 16 | python3-dev \ 17 | python3-pip \ 18 | python3-venv \ 19 | build-essential \ 20 | # For PPAs 21 | software-properties-common \ 22 | apt-transport-https \ 23 | ca-certificates \ 24 | && apt-get clean 25 | 26 | ############################################################################### 27 | # Python/AWS CLI 28 | ############################################################################### 29 | RUN python3 -m pip 
install --upgrade setuptools virtualenv \ 30 | && python3 -m pip install --upgrade awscli \ 31 | && ln -s `find /opt -name aws` /usr/local/bin/aws \ 32 | && which aws \ 33 | && aws --version 34 | 35 | 36 | ############################################################################### 37 | # Install entrypoint 38 | ############################################################################### 39 | ADD entrypoint.sh /usr/local/bin/builder 40 | RUN chmod a+x /usr/local/bin/builder 41 | ENTRYPOINT ["/usr/local/bin/builder"] 42 | -------------------------------------------------------------------------------- /.github/docker-images/rhel8-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.access.redhat.com/ubi8/ubi:latest 2 | 3 | SHELL ["/bin/bash", "-c"] 4 | 5 | RUN dnf install -y \ 6 | gcc \ 7 | gcc-c++ \ 8 | cmake \ 9 | wget \ 10 | git \ 11 | make \ 12 | sudo \ 13 | # RHEL8's default python3 is python3.6, which is EOL. 14 | # So install python3.9 (latest version on this distro, circa Oct 2022) 15 | python39-devel 16 | 17 | ############################################################################### 18 | # Python/AWS CLI 19 | ############################################################################### 20 | RUN python3 -m pip install --upgrade setuptools virtualenv \ 21 | && python3 -m pip install --upgrade awscli \ 22 | && aws --version 23 | 24 | ############################################################################### 25 | # Install entrypoint 26 | ############################################################################### 27 | ADD entrypoint.sh /usr/local/bin/builder 28 | RUN chmod a+x /usr/local/bin/builder 29 | ENTRYPOINT ["/usr/local/bin/builder"] 30 | -------------------------------------------------------------------------------- /.github/docker-images/swift-5-al2-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | # 
https://hub.docker.com/_/swift 2 | FROM swift:5.9.2-amazonlinux2 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | # any prereqs that appear to be missing are installed on base swift image i.e. tar, git 7 | ############################################################################### 8 | RUN yum -y install \ 9 | curl \ 10 | sudo \ 11 | # Python 12 | python3 \ 13 | python3-pip \ 14 | openssl-devel \ 15 | && yum clean all \ 16 | && rm -rf /var/cache/yum 17 | 18 | ############################################################################### 19 | # Python/AWS CLI 20 | ############################################################################### 21 | RUN python3 -m pip install --upgrade setuptools virtualenv \ 22 | && python3 -m pip install --upgrade awscli \ 23 | && aws --version 24 | 25 | ############################################################################### 26 | # Install entrypoint 27 | ############################################################################### 28 | ADD entrypoint.sh /usr/local/bin/builder 29 | RUN chmod a+x /usr/local/bin/builder 30 | ENTRYPOINT ["/usr/local/bin/builder"] 31 | -------------------------------------------------------------------------------- /.github/docker-images/swift-5-ubuntu-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://hub.docker.com/_/swift 2 | FROM swift:5.9.2-focal 3 | 4 | ############################################################################### 5 | # Install prereqs 6 | # any prereqs that appear to be missing are installed on base swift image i.e. 
tar, git 7 | ############################################################################### 8 | RUN apt-get update -qq \ 9 | && apt-get -y install \ 10 | sudo \ 11 | curl \ 12 | wget \ 13 | # Python 14 | python3 \ 15 | python3-pip \ 16 | # For PPAs 17 | libssl-dev \ 18 | software-properties-common \ 19 | apt-transport-https \ 20 | ca-certificates \ 21 | && apt-get clean 22 | 23 | ############################################################################### 24 | # Add the corretto repo and public key and install corretto 25 | ############################################################################### 26 | RUN wget -O- https://apt.corretto.aws/corretto.key | sudo apt-key add - 27 | RUN sudo add-apt-repository 'deb https://apt.corretto.aws stable main' 28 | RUN apt-get -y install java-11-amazon-corretto-jdk 29 | 30 | ############################################################################### 31 | # Python/AWS CLI 32 | ############################################################################### 33 | RUN python3 -m pip install setuptools \ 34 | && python3 -m pip install --upgrade awscli \ 35 | && aws --version 36 | 37 | ############################################################################### 38 | # Install entrypoint 39 | ############################################################################### 40 | ADD entrypoint.sh /usr/local/bin/builder 41 | RUN chmod a+x /usr/local/bin/builder 42 | ENTRYPOINT ["/usr/local/bin/builder"] 43 | -------------------------------------------------------------------------------- /.github/docker-images/ubuntu-18-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:18.04 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ############################################################################### 6 | # Install prereqs 7 | ############################################################################### 8 | RUN apt-get update -qq \ 9 | && apt-get -y install 
\ 10 | git \ 11 | curl \ 12 | sudo \ 13 | unzip \ 14 | # Ubuntu18's default python3 is python3.6, which is EOL. 15 | # So install python3.8 (latest version on this distro, circa Oct 2022) 16 | python3.8-dev \ 17 | python3.8-venv \ 18 | # This installs pip for all python versions on the system 19 | # (there is no "python3.8-pip") 20 | python3-pip \ 21 | build-essential \ 22 | # For PPAs 23 | software-properties-common \ 24 | apt-transport-https \ 25 | ca-certificates \ 26 | && apt-get clean 27 | 28 | # Add the longsleep/golang-backports PPA 29 | RUN apt-get update && apt-get install -y software-properties-common && add-apt-repository ppa:longsleep/golang-backports && apt-get update 30 | 31 | # Install Go from the PPA 32 | RUN apt-get install -y golang-go 33 | 34 | ############################################################################### 35 | # Python/AWS CLI 36 | ############################################################################### 37 | WORKDIR /tmp 38 | 39 | RUN python3.8 -m pip install setuptools \ 40 | && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o awscliv2.zip \ 41 | && unzip awscliv2.zip \ 42 | && sudo aws/install \ 43 | && aws --version 44 | 45 | ############################################################################### 46 | # Install pre-built CMake 47 | ############################################################################### 48 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 49 | && tar xvzf cmake.tar.gz -C /usr/local \ 50 | && cmake --version \ 51 | && rm -f /tmp/cmake.tar.gz 52 | 53 | ############################################################################### 54 | # Install entrypoint 55 | ############################################################################### 56 | ADD entrypoint.sh /usr/local/bin/builder 57 | RUN chmod a+x /usr/local/bin/builder 58 | ENTRYPOINT ["/usr/local/bin/builder"] 59 | 
-------------------------------------------------------------------------------- /.github/docker-images/ubuntu-20-aarch64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM arm64v8/ubuntu:focal 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ############################################################################### 6 | # Install prereqs 7 | ############################################################################### 8 | RUN apt-get update -qq \ 9 | && apt-get -y install \ 10 | git \ 11 | curl \ 12 | sudo \ 13 | unzip \ 14 | python3 \ 15 | python3-dev \ 16 | python3-pip \ 17 | build-essential \ 18 | software-properties-common \ 19 | apt-transport-https \ 20 | ca-certificates \ 21 | cmake 22 | 23 | ############################################################################### 24 | # Python/AWS CLI 25 | ############################################################################### 26 | WORKDIR /tmp 27 | 28 | # this image comes with gcc9.3 which current version of aws-lc rejects 29 | RUN curl -L https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - \ 30 | && add-apt-repository ppa:ubuntu-toolchain-r/test \ 31 | && apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-11 main" \ 32 | && apt-get update -y \ 33 | && apt-get install clang-11 cmake -y -f \ 34 | && apt-get clean 35 | 36 | ENV CC=clang-11 37 | 38 | RUN python3 -m pip install setuptools \ 39 | && curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o awscliv2.zip \ 40 | && unzip awscliv2.zip \ 41 | && sudo aws/install \ 42 | && aws --version 43 | 44 | ############################################################################### 45 | # Install entrypoint 46 | ############################################################################### 47 | ADD entrypoint.sh /usr/local/bin/builder 48 | RUN chmod a+x /usr/local/bin/builder 49 | ENTRYPOINT ["/usr/local/bin/builder"] 50 | 
-------------------------------------------------------------------------------- /.github/docker-images/ubuntu-20-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ############################################################################### 6 | # Install prereqs 7 | ############################################################################### 8 | RUN apt-get update -qq \ 9 | && apt-get -y install \ 10 | git \ 11 | curl \ 12 | sudo \ 13 | unzip \ 14 | python3-dev \ 15 | python3-pip \ 16 | build-essential \ 17 | # For PPAs 18 | software-properties-common \ 19 | apt-transport-https \ 20 | ca-certificates \ 21 | && apt-get clean 22 | 23 | # Add the longsleep/golang-backports PPA 24 | RUN apt-get update && apt-get install -y software-properties-common && add-apt-repository ppa:longsleep/golang-backports && apt-get update 25 | 26 | # Install Go from the PPA 27 | RUN apt-get install -y golang-go 28 | 29 | ############################################################################### 30 | # Python/AWS CLI 31 | ############################################################################### 32 | WORKDIR /tmp 33 | 34 | RUN python3 -m pip install setuptools \ 35 | && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o awscliv2.zip \ 36 | && unzip awscliv2.zip \ 37 | && sudo aws/install \ 38 | && aws --version 39 | 40 | ############################################################################### 41 | # Install pre-built CMake 42 | ############################################################################### 43 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 44 | && tar xvzf cmake.tar.gz -C /usr/local \ 45 | && cmake --version \ 46 | && rm -f /tmp/cmake.tar.gz 47 | 48 | ############################################################################### 49 | # Install 
entrypoint 50 | ############################################################################### 51 | ADD entrypoint.sh /usr/local/bin/builder 52 | RUN chmod a+x /usr/local/bin/builder 53 | ENTRYPOINT ["/usr/local/bin/builder"] 54 | -------------------------------------------------------------------------------- /.github/docker-images/ubuntu-22-x64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:22.04 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ############################################################################### 6 | # Install prereqs 7 | ############################################################################### 8 | RUN apt-get update -qq \ 9 | && apt-get -y install \ 10 | git \ 11 | curl \ 12 | sudo \ 13 | unzip \ 14 | python3-dev \ 15 | python3-pip \ 16 | build-essential \ 17 | # For PPAs 18 | software-properties-common \ 19 | apt-transport-https \ 20 | ca-certificates \ 21 | golang-go \ 22 | && apt-get clean 23 | 24 | ############################################################################### 25 | # Python/AWS CLI 26 | ############################################################################### 27 | WORKDIR /tmp 28 | 29 | RUN python3 -m pip install setuptools \ 30 | && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o awscliv2.zip \ 31 | && unzip awscliv2.zip \ 32 | && sudo aws/install \ 33 | && aws --version 34 | 35 | ############################################################################### 36 | # Install pre-built CMake 37 | ############################################################################### 38 | RUN curl -sSL https://d19elf31gohf1l.cloudfront.net/_binaries/cmake/cmake-3.13-manylinux1-x64.tar.gz -o cmake.tar.gz \ 39 | && tar xvzf cmake.tar.gz -C /usr/local \ 40 | && cmake --version \ 41 | && rm -f /tmp/cmake.tar.gz 42 | 43 | ############################################################################### 44 | # Install entrypoint 45 | 
############################################################################### 46 | ADD entrypoint.sh /usr/local/bin/builder 47 | RUN chmod a+x /usr/local/bin/builder 48 | ENTRYPOINT ["/usr/local/bin/builder"] 49 | -------------------------------------------------------------------------------- /.github/workflows/closed-issue-message.yml: -------------------------------------------------------------------------------- 1 | name: Closed Issue Message 2 | on: 3 | issues: 4 | types: [closed] 5 | jobs: 6 | auto_comment: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: aws-actions/closed-issue-message@v1 10 | with: 11 | # These inputs are both required 12 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 13 | message: | 14 | ### ⚠️COMMENT VISIBILITY WARNING⚠️ 15 | Comments on closed issues are hard for our team to see. 16 | If you need more assistance, please either tag a team member or open a new issue that references this one. 17 | If you wish to keep having a conversation with other community members under this issue feel free to do so. 18 | -------------------------------------------------------------------------------- /.github/workflows/create-channel.yml: -------------------------------------------------------------------------------- 1 | name: Create Channel 2 | 3 | # need images created at least once per branch, even if there are no docker changes 4 | # so that downstream projects can use the branch channel. 
5 | on: 6 | push: 7 | branches-ignore: 8 | - 'main' 9 | paths: 10 | - '.github/workflows/create-channel.yml' 11 | - '.github/actions/**' 12 | - '.github/docker-images/**' 13 | - '.github/workflows/*.sh' 14 | - 'builder/**' 15 | create: 16 | 17 | env: 18 | AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} 19 | CRT_CI_ROLE: ${{ secrets.CRT_CI_ROLE_ARN }} 20 | AWS_DEFAULT_REGION: us-east-1 21 | 22 | permissions: 23 | id-token: write # This is required for requesting the JWT 24 | 25 | # nothing 26 | jobs: 27 | package: 28 | name: Package builder app 29 | runs-on: ubuntu-24.04 30 | 31 | steps: 32 | - uses: aws-actions/configure-aws-credentials@v4 33 | with: 34 | role-to-assume: ${{ env.CRT_CI_ROLE }} 35 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 36 | 37 | - name: Checkout Source 38 | uses: actions/checkout@v4 39 | 40 | - name: Get release tag 41 | uses: ./.github/actions/release-tag 42 | id: tag 43 | 44 | - name: Package builder to S3 45 | run: | 46 | export CHANNEL=${{ steps.tag.outputs.release_tag }} 47 | mkdir -p build 48 | mkdir -p staging 49 | cp -r builder staging/. 
50 | python3 -m zipapp --python="/usr/bin/env python3" -m "builder.main:main" --output=build/builder staging 51 | aws s3 cp build/builder s3://$AWS_S3_BUCKET/channels/$CHANNEL/builder.pyz 52 | 53 | - name: Artifact builder 54 | uses: actions/upload-artifact@v4 55 | with: 56 | name: builder 57 | path: build/builder 58 | 59 | - name: Upload container CI script 60 | run: aws s3 cp ./.github/workflows/linux-container-ci.sh s3://aws-crt-test-stuff/ci/${{ steps.tag.outputs.release_tag }}/linux-container-ci.sh 61 | 62 | standard-images: 63 | name: ${{ matrix.variant.name }} 64 | runs-on: ubuntu-24.04 65 | strategy: 66 | fail-fast: false 67 | matrix: 68 | variant: 69 | - name: manylinux1-x86 70 | - name: manylinux1-x64 71 | - name: manylinux2014-x86 72 | - name: manylinux2014-x64 73 | - name: manylinux2014-aarch64 74 | - name: musllinux-1-1-aarch64 75 | - name: musllinux-1-1-x64 76 | - name: al2012-x64 77 | - name: al2-x64 78 | - name: al2023-x64 79 | - name: ubuntu-18-x64 80 | - name: ubuntu-20-x64 81 | - name: ubuntu-20-aarch64 82 | extra-build-flag: --platform=linux/aarch64 83 | - name: ubuntu-22-x64 84 | - name: node-10-linux-x64 85 | - name: swift-5-al2-x64 86 | - name: swift-5-ubuntu-x64 87 | - name: rhel8-x64 88 | - name: opensuse-leap 89 | - name: fedora-34-x64 90 | - name: raspbian-bullseye 91 | - name: alpine-3.16-x64 92 | - name: alpine-3.16-x86 93 | - name: alpine-3.16-arm64 94 | - name: alpine-3.16-armv7 95 | - name: alpine-3.16-armv6 96 | - name: openwrt-x64-openjdk8 97 | 98 | steps: 99 | - uses: aws-actions/configure-aws-credentials@v4 100 | with: 101 | role-to-assume: ${{ env.CRT_CI_ROLE }} 102 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 103 | 104 | - name: Checkout Sources 105 | uses: actions/checkout@v4 106 | 107 | - name: Get release tag 108 | uses: ./.github/actions/release-tag 109 | id: tag 110 | 111 | - name: Login to docker repo 112 | run: aws ecr get-login-password --region us-east-1 | docker login ${{ secrets.AWS_ECR_REPO }} -u AWS --password-stdin 
113 | 114 | - name: Install entrypoint 115 | run: cat .github/docker-images/entrypoint.sh | sed s/version=LATEST/version=${{ steps.tag.outputs.release_tag }}/ > .github/docker-images/${{ matrix.variant.name }}/entrypoint.sh 116 | 117 | - name: Install qemu/docker 118 | run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes 119 | 120 | - name: Build ${{ matrix.variant.name }} image 121 | uses: whoan/docker-build-with-cache-action@v8 122 | with: 123 | registry: ${{ secrets.AWS_ECR_REPO }} 124 | username: ${{ steps.creds.outputs.aws-access-key-id }} 125 | password: ${{ steps.creds.outputs.aws-secret-access-key }} 126 | session: ${{ steps.creds.outputs.aws-session-token }} 127 | image_name: aws-crt-${{ matrix.variant.name }} 128 | image_tag: ${{ steps.tag.outputs.release_tag }} 129 | context: .github/docker-images/${{ matrix.variant.name }} 130 | build_extra_args: --compress=true ${{ matrix.variant.extra-build-flag }} 131 | 132 | - name: Create ECR repo if necessary 133 | run: | 134 | if ! 
aws --region us-east-1 ecr describe-repositories --repository-names aws-crt-${{ matrix.variant.name }} > /dev/null 2>&1; then \ 135 | aws --region us-east-1 ecr create-repository --repository-name aws-crt-${{ matrix.variant.name }}; \ 136 | fi 137 | 138 | - name: Export ${{ matrix.variant.name }} image to ECR 139 | run: | 140 | export IMAGE_TAG=${{ steps.tag.outputs.release_tag }} 141 | docker push ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:$IMAGE_TAG 142 | -------------------------------------------------------------------------------- /.github/workflows/create-release.yml: -------------------------------------------------------------------------------- 1 | name: Create Release 2 | 3 | # Make new images for every published release 4 | on: 5 | release: 6 | types: [published] 7 | workflow_dispatch: 8 | 9 | env: 10 | AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} 11 | CRT_CI_ROLE: ${{ secrets.CRT_CI_ROLE_ARN }} 12 | AWS_DEFAULT_REGION: us-east-1 13 | 14 | permissions: 15 | id-token: write # This is required for requesting the JWT 16 | 17 | jobs: 18 | package: 19 | name: Package builder app 20 | runs-on: ubuntu-24.04 21 | 22 | steps: 23 | - uses: aws-actions/configure-aws-credentials@v4 24 | with: 25 | role-to-assume: ${{ env.CRT_CI_ROLE }} 26 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 27 | 28 | - uses: aws-actions/configure-aws-credentials@v4 29 | with: 30 | role-to-assume: ${{ env.CRT_CI_ROLE }} 31 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 32 | 33 | - name: Checkout Source 34 | uses: actions/checkout@v4 35 | 36 | - name: Get release tag 37 | uses: ./.github/actions/release-tag 38 | id: tag 39 | 40 | # re-enable this when requirements.txt has actual dependencies 41 | # - name: Bundle dependencies 42 | # run: | 43 | # python3 -m pip install -r requirements.txt --target builder 44 | # rm -rf builder/*.dist-info 45 | 46 | - name: Package builder to S3 47 | run: | 48 | mkdir -p build 49 | mkdir -p staging 50 | cp -r builder staging/. 
51 | python3 -m zipapp --python="/usr/bin/env python3" -m "builder.main:main" --output=build/builder staging 52 | aws s3 cp build/builder s3://$AWS_S3_BUCKET/releases/${{ steps.tag.outputs.release_tag }}/builder.pyz 53 | 54 | - name: Artifact builder 55 | uses: actions/upload-artifact@v4 56 | with: 57 | name: builder 58 | path: build/builder 59 | 60 | export-linux-images: 61 | name: Release aws-crt-${{ matrix.variant }} 62 | runs-on: ubuntu-24.04 63 | strategy: 64 | fail-fast: false 65 | matrix: 66 | variant: 67 | - name: manylinux1-x86 68 | - name: manylinux1-x64 69 | - name: manylinux2014-x86 70 | - name: manylinux2014-x64 71 | - name: manylinux2014-aarch64 72 | - name: musllinux-1-1-aarch64 73 | - name: musllinux-1-1-x64 74 | - name: al2012-x64 75 | - name: al2-x64 76 | - name: al2023-x64 77 | - name: ubuntu-18-x64 78 | - name: ubuntu-20-x64 79 | - name: ubuntu-20-aarch64 80 | extra-build-flag: --platform=linux/aarch64 81 | - name: ubuntu-22-x64 82 | - name: node-10-linux-x64 83 | - name: swift-5-al2-x64 84 | - name: swift-5-ubuntu-x64 85 | - name: rhel8-x64 86 | - name: opensuse-leap 87 | - name: fedora-34-x64 88 | - name: raspbian-bullseye 89 | - name: alpine-3.16-x64 90 | - name: alpine-3.16-x86 91 | - name: alpine-3.16-arm64 92 | - name: alpine-3.16-armv7 93 | - name: alpine-3.16-armv6 94 | - name: openwrt-x64-openjdk8 95 | 96 | steps: 97 | - uses: aws-actions/configure-aws-credentials@v4 98 | with: 99 | role-to-assume: ${{ env.CRT_CI_ROLE }} 100 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 101 | 102 | - name: Checkout Sources 103 | uses: actions/checkout@v4 104 | 105 | - name: Get release tag 106 | uses: ./.github/actions/release-tag 107 | id: tag 108 | 109 | - name: Install entrypoint 110 | run: cat .github/docker-images/entrypoint.sh | sed s/version=LATEST/version=${{ steps.tag.outputs.release_tag }}/ > .github/docker-images/${{ matrix.variant.name }}/entrypoint.sh 111 | 112 | - name: Install qemu/docker 113 | run: docker run --rm --privileged 
multiarch/qemu-user-static --reset -p yes 114 | 115 | - name: Build aws-crt-${{ matrix.variant.name }} image 116 | uses: whoan/docker-build-with-cache-action@v8 117 | with: 118 | registry: ${{ secrets.AWS_ECR_REPO }} 119 | username: ${{ steps.creds.outputs.aws-access-key-id }} 120 | password: ${{ steps.creds.outputs.aws-secret-access-key }} 121 | session: ${{ steps.creds.outputs.aws-session-token }} 122 | image_name: aws-crt-${{ matrix.variant.name }} 123 | image_tag: ${{ steps.tag.outputs.release_tag }} 124 | context: .github/docker-images/${{ matrix.variant.name }} 125 | build_extra_args: --compress=true ${{ matrix.variant.extra-build-flag }} 126 | 127 | - name: Login to docker repos 128 | run: | 129 | aws ecr get-login-password --region us-east-1 | docker login ${{ secrets.AWS_ECR_REPO }} -u AWS --password-stdin 130 | 131 | - name: Create ECR repo if necessary 132 | run: | 133 | if ! aws --region us-east-1 ecr describe-repositories --repository-names aws-crt-${{ matrix.variant.name }} > /dev/null 2>&1; then \ 134 | aws --region us-east-1 ecr create-repository --repository-name aws-crt-${{ matrix.variant.name }}; \ 135 | fi 136 | 137 | - name: Tag/push release images as latest 138 | run: | 139 | export IMAGE_TAG=${{ steps.tag.outputs.release_tag }} 140 | docker tag ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:$IMAGE_TAG ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:latest 141 | docker push ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:$IMAGE_TAG 142 | docker push ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:latest 143 | 144 | - name: Export aws-crt-${{ matrix.variant.name }} image to S3/releases 145 | run: | 146 | export IMAGE_TAG=${{ steps.tag.outputs.release_tag }} 147 | docker save ${{ secrets.AWS_ECR_REPO }}/aws-crt-${{ matrix.variant.name }}:$IMAGE_TAG \ 148 | | gzip \ 149 | | aws s3 cp - s3://${{env.AWS_S3_BUCKET}}/releases/$IMAGE_TAG/aws-crt-${{ matrix.variant.name }}.tar.gz 150 | aws s3 
cp s3://${{env.AWS_S3_BUCKET}}/releases/$IMAGE_TAG/aws-crt-${{ matrix.variant.name }}.tar.gz s3://${{env.AWS_S3_BUCKET}}/LATEST/aws-crt-${{ matrix.variant.name }}.tar.gz 151 | 152 | upload-ci-script: 153 | name: Upload container ci script 154 | runs-on: ubuntu-24.04 155 | 156 | steps: 157 | - uses: aws-actions/configure-aws-credentials@v4 158 | with: 159 | role-to-assume: ${{ env.CRT_CI_ROLE }} 160 | aws-region: ${{ env.AWS_DEFAULT_REGION }} 161 | 162 | - name: Checkout Source 163 | uses: actions/checkout@v4 164 | 165 | - name: Get release tag 166 | uses: ./.github/actions/release-tag 167 | id: tag 168 | 169 | - name: Upload script 170 | run: aws s3 cp ./.github/workflows/linux-container-ci.sh s3://aws-crt-test-stuff/ci/${{ steps.tag.outputs.release_tag }}/linux-container-ci.sh 171 | -------------------------------------------------------------------------------- /.github/workflows/docker_buildx.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | if [ $# -lt 6 ]; then 6 | echo "Usage: $0 REGISTRY USERNAME PASSWORD IMAGE_NAME IMAGE_TAG CONTEXT (EXTRA_ARGS)" 7 | fi 8 | 9 | trim() { 10 | local var="$*" 11 | # remove leading whitespace characters 12 | var="${var#"${var%%[![:space:]]*}"}" 13 | # remove trailing whitespace characters 14 | var="${var%"${var##*[![:space:]]}"}" 15 | echo -n "$var" 16 | } 17 | 18 | INPUT_REGISTRY=$(trim $1) 19 | shift 20 | INPUT_USERNAME=$(trim $1) 21 | shift 22 | INPUT_PASSWORD=$(trim $1) 23 | shift 24 | INPUT_IMAGE_NAME=$(trim $1) 25 | shift 26 | INPUT_IMAGE_TAG=$(trim $1) 27 | shift 28 | INPUT_CONTEXT=$(trim $1) 29 | shift 30 | # gather up whatever is left 31 | INPUT_BUILD_EXTRA_ARGS="$(trim $1) $(trim $2) $(trim $3) $(trim $4) $(trim $5) $(trim $6) $(trim $7) $(trim $8) $(trim $9)" 32 | 33 | BUILDX_VERSION=v0.3.1 34 | 35 | _get_full_image_name() { 36 | echo ${INPUT_REGISTRY:+$INPUT_REGISTRY/}${INPUT_IMAGE_NAME} 37 | } 38 | 39 | install_buildx() { 40 | 
$(dirname $0)/install_buildx.sh 41 | } 42 | 43 | login_to_registry() { 44 | echo "${INPUT_PASSWORD}" | docker login -u "${INPUT_USERNAME}" --password-stdin "${INPUT_REGISTRY}" 45 | } 46 | 47 | build_image() { 48 | # pull previous image, ignore failure if it doesn't exist 49 | docker pull "$(_get_full_image_name)":${INPUT_IMAGE_TAG} || true 50 | # build builder target image 51 | docker build \ 52 | --build-arg BUILDKIT_INLINE_CACHE=1 \ 53 | --output=type=docker,push=true \ 54 | --tag="$(_get_full_image_name)":${INPUT_IMAGE_TAG} \ 55 | ${INPUT_BUILD_EXTRA_ARGS} \ 56 | ${INPUT_CONTEXT} 57 | } 58 | 59 | logout_from_registry() { 60 | docker logout "${INPUT_REGISTRY}" 61 | } 62 | 63 | login_to_registry 64 | install_buildx 65 | build_image 66 | logout_from_registry 67 | 68 | -------------------------------------------------------------------------------- /.github/workflows/handle-stale-discussions.yml: -------------------------------------------------------------------------------- 1 | name: HandleStaleDiscussions 2 | on: 3 | schedule: 4 | - cron: '0 */4 * * *' 5 | discussion_comment: 6 | types: [created] 7 | 8 | jobs: 9 | handle-stale-discussions: 10 | name: Handle stale discussions 11 | runs-on: ubuntu-latest 12 | permissions: 13 | discussions: write 14 | steps: 15 | - name: Stale discussions action 16 | uses: aws-github-ops/handle-stale-discussions@v1 17 | env: 18 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 19 | 20 | -------------------------------------------------------------------------------- /.github/workflows/install_buildx.sh: -------------------------------------------------------------------------------- 1 | 2 | BUILDX_VERSION=v0.3.1 3 | 4 | install_buildx() { 5 | buildx_tag=$BUILDX_VERSION 6 | docker_plugins_path=$HOME/.docker/cli-plugins 7 | buildx_release_url=https://github.com/docker/buildx/releases/download/$buildx_tag/buildx-$buildx_tag.linux-amd64 8 | 9 | mkdir -p $docker_plugins_path 10 | curl -L -0 $buildx_release_url -o 
$docker_plugins_path/docker-buildx 11 | chmod a+x $docker_plugins_path/docker-buildx 12 | docker buildx version 13 | } 14 | 15 | configure_buildx() { 16 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes 17 | docker buildx create --name builder --driver docker-container --platform linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 --use 18 | docker buildx inspect --bootstrap 19 | docker buildx install 20 | } 21 | 22 | install_buildx 23 | configure_buildx 24 | -------------------------------------------------------------------------------- /.github/workflows/issue-regression-labeler.yml: -------------------------------------------------------------------------------- 1 | # Apply potential regression label on issues 2 | name: issue-regression-label 3 | on: 4 | issues: 5 | types: [opened, edited] 6 | jobs: 7 | add-regression-label: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | steps: 12 | - name: Fetch template body 13 | id: check_regression 14 | uses: actions/github-script@v7 15 | env: 16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 17 | TEMPLATE_BODY: ${{ github.event.issue.body }} 18 | with: 19 | script: | 20 | const regressionPattern = /\[x\] Select this option if this issue appears to be a regression\./i; 21 | const template = `${process.env.TEMPLATE_BODY}` 22 | const match = regressionPattern.test(template); 23 | core.setOutput('is_regression', match); 24 | - name: Manage regression label 25 | env: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | run: | 28 | if [ "${{ steps.check_regression.outputs.is_regression }}" == "true" ]; then 29 | gh issue edit ${{ github.event.issue.number }} --add-label "potential-regression" -R ${{ github.repository }} 30 | else 31 | gh issue edit ${{ github.event.issue.number }} --remove-label "potential-regression" -R ${{ github.repository }} 32 | fi 33 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: 
-------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - 'main' 7 | 8 | jobs: 9 | autopep8: 10 | runs-on: ubuntu-24.04 11 | 12 | steps: 13 | - name: Checkout Source 14 | uses: actions/checkout@v4 15 | 16 | - name: Build and Test 17 | run: | 18 | python3 -m pip install --upgrade setuptools 19 | python3 -m pip install --upgrade autopep8 20 | python3 -m autopep8 --exit-code --diff --recursive setup.py builder 21 | -------------------------------------------------------------------------------- /.github/workflows/linux-container-ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # $1 - Builder version 3 | # $2 - Image Name 4 | 5 | set -e 6 | 7 | BUILDER_VERSION=$1 8 | shift 9 | IMAGE_NAME=$1 10 | shift 11 | 12 | aws ecr get-login-password | docker login 123124136734.dkr.ecr.us-east-1.amazonaws.com -u AWS --password-stdin 13 | export DOCKER_IMAGE=123124136734.dkr.ecr.us-east-1.amazonaws.com/${IMAGE_NAME}:${BUILDER_VERSION} 14 | docker run --env GITHUB_REF --env GITHUB_HEAD_REF --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN --env AWS_DEFAULT_REGION --env CXXFLAGS --env AWS_CRT_ARCH --env CTEST_PARALLEL_LEVEL $DOCKER_IMAGE --version=${BUILDER_VERSION} $@ 15 | -------------------------------------------------------------------------------- /.github/workflows/stale_issue.yml: -------------------------------------------------------------------------------- 1 | name: "Close stale issues" 2 | 3 | # Controls when the action will run. 
4 | on: 5 | schedule: 6 | - cron: "*/60 * * * *" 7 | 8 | jobs: 9 | cleanup: 10 | runs-on: ubuntu-latest 11 | name: Stale issue job 12 | permissions: 13 | issues: write 14 | pull-requests: write 15 | steps: 16 | - uses: aws-actions/stale-issue-cleanup@v6 17 | with: 18 | # Setting messages to an empty string will cause the automation to skip 19 | # that category 20 | ancient-issue-message: Greetings! Sorry to say but this is a very old issue that is probably not getting as much attention as it deserves. We encourage you to check if this is still an issue in the latest release and if you find that this is still a problem, please feel free to open a new one. 21 | stale-issue-message: Greetings! It looks like this issue hasn’t been active in a few days. We encourage you to check if this is still an issue in the latest release. Because it has been a few days since the last update on this, and in the absence of more information, we will be closing this issue soon. If you find that this is still a problem, please feel free to provide a comment or add an upvote to prevent automatic closure, or if the issue is already closed, please feel free to open a new one. 22 | stale-pr-message: Greetings! It looks like this PR hasn’t been active in a few days, add a comment or an upvote to prevent automatic closure, or if the issue is already closed, please feel free to open a new one. 
23 | 24 | # These labels are required 25 | stale-issue-label: closing-soon 26 | exempt-issue-label: automation-exempt 27 | stale-pr-label: closing-soon 28 | exempt-pr-label: pr/needs-review 29 | response-requested-label: response-requested 30 | 31 | # Don't set closed-for-staleness label to skip closing very old issues 32 | # regardless of label 33 | closed-for-staleness-label: closed-for-staleness 34 | 35 | # Issue timing 36 | days-before-stale: 2 37 | days-before-close: 5 38 | days-before-ancient: 36500 39 | 40 | # If you don't want to mark a issue as being ancient based on a 41 | # threshold of "upvotes", you can set this here. An "upvote" is 42 | # the total number of +1, heart, hooray, and rocket reactions 43 | # on an issue. 44 | minimum-upvotes-to-exempt: 1 45 | 46 | repo-token: ${{ secrets.GITHUB_TOKEN }} 47 | loglevel: DEBUG 48 | # Set dry-run to true to not perform label or close actions. 49 | dry-run: false 50 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 
5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 
38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 62 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | -------------------------------------------------------------------------------- /builder/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-crt-builder/1c53c869ba916edd216e261eaf8e45be95967ec2/builder/__init__.py -------------------------------------------------------------------------------- /builder/__main__.py: -------------------------------------------------------------------------------- 1 | from .main import main 2 | 3 | if __name__ == "__main__": 4 | main() 5 | -------------------------------------------------------------------------------- /builder/actions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-crt-builder/1c53c869ba916edd216e261eaf8e45be95967ec2/builder/actions/__init__.py -------------------------------------------------------------------------------- /builder/actions/git.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.

import os
from builder.core.action import Action
from builder.core.project import Project


class DownloadSource(Action):
    """ Downloads the source for a given project via `git clone`.

    Keyword Args:
        project: the Project to clone (required)
        branch: git ref to check out after cloning (default 'main')
        path: parent directory the clone is placed under (default '.')
    """

    def __init__(self, **kwargs):
        self.project = kwargs['project']
        self.branch = kwargs.get('branch', 'main')
        # Final checkout location is <path>/<project name>, absolute.
        self.path = os.path.abspath(os.path.join(
            kwargs.get('path', '.'), self.project.name))

    def run(self, env):
        # A project with a path is already present on disk; nothing to do.
        if self.project.path:
            print('Project {} already exists on disk'.format(self.project.name))
            return

        sh = env.shell

        print('Cloning {} from git'.format(self.project))
        # Start from a clean directory so the clone cannot fail on leftovers.
        if os.path.exists(self.path):
            sh.rm(self.path)
        sh.exec("git", "clone", self.project.url,
                self.path, always=True, retries=3)
        sh.pushd(self.path)
        try:
            sh.exec("git", "checkout", self.branch,
                    always=True, quiet=True, check=True)
            print('Switched to branch {}'.format(self.branch))
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Only a failed checkout should
            # fall back to the repo's default branch.
            print("Project {} does not have a branch named {}, using main".format(
                self.project.name, self.branch))

        sh.exec('git', 'submodule', 'update',
                '--init', '--recursive', retries=3)

        # reload project now that it's on disk
        self.project = Project.find_project(self.project.name)
        sh.popd()


class DownloadDependencies(Action):
    """ Downloads the source for dependencies and consumers if necessary.

    Walks the dependency graph breadth-ish-first: transitive dependencies are
    pushed to the front of the work queue, downstream consumers to the back.
    """

    def run(self, env):
        project = env.project
        sh = env.shell
        branch = env.branch
        spec = env.spec
        deps = project.get_dependencies(spec)

        if spec.downstream:
            deps += project.get_consumers(spec)

        if deps:
            # Rebuild the deps dir from scratch so stale checkouts can't leak in.
            sh.rm(env.deps_dir)
            sh.mkdir(env.deps_dir)
            sh.pushd(env.deps_dir)

            while deps:
                dep = deps.pop(0)  # pop front
                dep_proj = Project.find_project(dep.name)
                if dep_proj.path:
                    # already on disk, skip
                    continue

                # A pinned revision on the dependency overrides the build branch.
                dep_branch = branch if dep.revision is None else dep.revision

                DownloadSource(project=dep_proj, branch=dep_branch, path=env.deps_dir).run(env)

                # grab updated project, collect transitive dependencies/consumers
                dep_proj = Project.find_project(dep.name)
                deps = dep_proj.get_dependencies(spec) + deps  # push front
                if spec and spec.downstream:
                    deps += dep_proj.get_consumers(spec)  # push back

            sh.popd()
""" 25 | 26 | pkg_init_done = False 27 | 28 | def __init__(self, packages=[]): 29 | self.packages = packages 30 | 31 | def run(self, env): 32 | config = env.config 33 | sh = env.shell 34 | 35 | parser = argparse.ArgumentParser() 36 | parser.add_argument('--skip-install', action='store_true') 37 | args = parser.parse_known_args(env.args.args)[0] 38 | 39 | sudo = config.get('sudo', current_os() == 'linux') 40 | sudo = ['sudo'] if sudo else [] 41 | 42 | packages = self.packages if self.packages else config.get( 43 | 'packages', []) 44 | if packages: 45 | packages = UniqueList(packages) 46 | pkg_tool = package_tool() 47 | print('Installing packages via {}: {}'.format( 48 | pkg_tool.value, ', '.join(packages))) 49 | 50 | was_dryrun = sh.dryrun 51 | if args.skip_install: 52 | sh.dryrun = True 53 | 54 | if not InstallPackages.pkg_init_done: 55 | pkg_setup = UniqueList(config.get('pkg_setup', [])) 56 | if pkg_setup: 57 | for cmd in pkg_setup: 58 | if isinstance(cmd, str): 59 | cmd = cmd.split(' ') 60 | assert isinstance(cmd, list) 61 | sh.exec(*sudo, cmd, check=True, retries=3) 62 | 63 | pkg_update = config.get('pkg_update', None) 64 | if pkg_update: 65 | if not isinstance(pkg_update, list): 66 | pkg_update = pkg_update.split(' ') 67 | sh.exec(*sudo, pkg_update, check=True, retries=3) 68 | 69 | InstallPackages.pkg_init_done = True 70 | 71 | pkg_install = config['pkg_install'] 72 | if not isinstance(pkg_install, list): 73 | pkg_install = pkg_install.split(' ') 74 | pkg_install += packages 75 | 76 | sh.exec(*sudo, pkg_install, check=True, retries=3) 77 | 78 | if args.skip_install: 79 | sh.dryrun = was_dryrun 80 | 81 | setup_steps = env.config.get('setup_steps', []) 82 | if setup_steps: 83 | steps = [] 84 | for step in setup_steps: 85 | if not isinstance(step, list): 86 | step = step.split(' ') 87 | if step: 88 | steps.append([*sudo, *step]) 89 | if args.skip_install: 90 | return Script([partial(set_dryrun, True), *steps, 91 | partial(set_dryrun, sh.dryrun)], name='setup') 92 
| 93 | return Script(steps, name='setup') 94 | 95 | 96 | # Expose compiler via environment 97 | def export_compiler(compiler, env): 98 | toolchain = env.toolchain 99 | 100 | if current_os() == 'windows' or toolchain.cross_compile: 101 | return 102 | 103 | if not env.shell.getenv('CC'): 104 | cc_path = toolchain.compiler_path() 105 | if cc_path: 106 | env.shell.setenv('CC', cc_path) 107 | else: 108 | print('WARNING: C compiler {} could not be found for export'.format(compiler)) 109 | 110 | if not env.shell.getenv('CXX'): 111 | cxx_path = toolchain.cxx_compiler_path() 112 | if cxx_path: 113 | env.shell.setenv('CXX', cxx_path) 114 | else: 115 | print('WARNING: CXX compiler {} could not be found for export'.format(compiler)) 116 | 117 | 118 | class InstallCompiler(Action): 119 | def run(self, env): 120 | config = env.config 121 | sh = env.shell 122 | if not config.get('needs_compiler'): 123 | print('Compiler is not required for current configuration, skipping.') 124 | return 125 | 126 | assert env.toolchain 127 | toolchain = env.toolchain 128 | 129 | # add dockcross as an implicit import if cross-compiling 130 | if toolchain.cross_compile: 131 | setattr(env.project, 'imports', getattr( 132 | env.project, 'imports', []) + ['dockcross']) 133 | 134 | imports = env.project.get_imports(env.spec) 135 | for imp in imports: 136 | if imp.compiler: 137 | imp.install(env) 138 | 139 | export_compiler(env.spec.compiler, env) 140 | -------------------------------------------------------------------------------- /builder/actions/mirror.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
class Mirror(Action):
    """ Updates mirrored dependencies in S3/CloudFront """

    def is_main(self):
        # Top-level action: needs no setup/install actions run before it.
        return True

    def run(self, env):
        # Every registered Import subclass is a candidate for mirroring.
        import_classes = Import.__subclasses__()

        for import_class in import_classes:
            imp = import_class()
            # only bother if the class actually implements mirror()
            # (i.e. the subclass's own __dict__ defines mirror, rather than
            # inheriting the base Import.mirror attribute unchanged)
            if imp.__class__.__dict__.get('mirror', Import.__dict__['mirror']) != Import.__dict__['mirror']:
                print('Mirroring {}'.format(imp.name))
                imp.mirror(env)
def print_release_notes(env):
    """Print release-note material for a CRT package: submodule version bumps
    (with links to their GitHub release pages) and the package's own git log
    since the last release tag.

    NOTE(review): this mutates the working tree — it checks out `main`, then
    the previous release tag, then `main` again; run it on a clean checkout.
    Exits the process on warnings unless --ignore-warnings is passed.
    """
    sh = env.shell

    def warn(msg):
        # Warnings are fatal unless --ignore-warnings was given.
        if args.ignore_warnings:
            print("[WARNING]", msg)
        else:
            print(msg + "\nrun with --ignore-warnings to proceed anyway")
            sys.exit(1)

    def get_all_tags():
        # Returns [{'commit', 'str', 'num_tuple'}, ...] for every vX.Y.Z tag
        # in the current repo, sorted highest version first.
        git_output = sh.exec('git', 'show-ref', '--tags', quiet=True).output
        tags = []
        for line in git_output.splitlines():
            # line looks like: "e18f041a0c8d17189f2eae2a32f16e0a7a3f0f1c refs/tags/v0.5.18"
            match = re.match(
                r'([a-f0-9]+) refs/tags/(v([0-9]+)\.([0-9]+)\.([0-9]+))', line)
            if not match:
                # skip malformed release tags
                continue
            tags.append({
                'commit': match.group(1),
                'str': match.group(2),
                'num_tuple': (int(match.group(3)), int(match.group(4)), int(match.group(5))),
            })
        # sort highest version first
        return sorted(tags, reverse=True, key=lambda tag: tag['num_tuple'])

    def get_tag_for_commit(tags, commit):
        # Returns the tag dict whose commit matches, or None (implicitly).
        for tag in tags:
            if tag['commit'] == commit:
                return tag

    def get_current_commit():
        # Full SHA of HEAD in the current working directory.
        git_output = sh.exec('git', 'rev-parse', 'HEAD', quiet=True).output
        return git_output.splitlines()[0]

    parser = argparse.ArgumentParser(
        description="Help gather release notes for CRTs")
    parser.add_argument('--ignore-warnings',
                        action='store_true', help="ignore warnings")
    args = parser.parse_known_args(env.args.args)[0]

    package_path = sh.cwd()

    print('Syncing to latest main...')
    sh.exec('git', 'checkout', 'main', quiet=True)
    sh.exec('git', 'pull', quiet=True)
    sh.exec('git', 'submodule', 'update', '--init', quiet=True)

    print('Gathering info...')
    package_tags = get_all_tags()
    package_latest_tag = package_tags[0]

    # Everything committed since the most recent release tag.
    package_changes = sh.exec(
        'git', 'log', package_latest_tag['commit'] + '..', quiet=True).output
    if not package_changes:
        print('No changes since last release', package_latest_tag['str'])
        sys.exit(0)

    # Collect CRT submodules (s2n is deliberately excluded).
    submodules_root_path = os.path.join(package_path, 'aws-common-runtime')
    submodules = []
    if os.path.exists(submodules_root_path):
        submodule_names = sorted(os.listdir(submodules_root_path))
        for submodule_name in submodule_names:
            submodule = {'name': submodule_name,
                         'path': os.path.join(submodules_root_path, submodule_name)}
            if submodule_name == 's2n' or not os.path.isdir(submodule['path']):
                continue
            submodules.append(submodule)
            sh.cd(submodule['path'], quiet=True)
            sh.exec('git', 'fetch', quiet=True)
            submodule['tags'] = get_all_tags()
            submodule['current_commit'] = get_current_commit()
            submodule['current_tag'] = get_tag_for_commit(
                submodule['tags'], submodule['current_commit'])
            newest_tag = submodule['tags'][0]
            if submodule['current_tag'] != newest_tag:
                warn('{} not at newest release: {} < {}'.format(
                    submodule_name,
                    submodule['current_tag']['str'],
                    newest_tag['str']))

    # Check out the previous release so each submodule's old pin can be read.
    print('Syncing to previous release {}...'.format(
        package_latest_tag['str']))
    sh.cd(package_path, quiet=True)
    sh.exec('git', 'checkout', package_latest_tag['str'], quiet=True)
    sh.exec('git', 'submodule', 'update', '--init', quiet=True)

    print('Gathering info...')
    for submodule in submodules:
        sh.cd(submodule['path'], quiet=True)
        submodule['prev_commit'] = get_current_commit()
        submodule['prev_tag'] = get_tag_for_commit(
            submodule['tags'], submodule['prev_commit'])

    print('Syncing back to latest...')
    sh.exec('git', 'checkout', 'main', quiet=True)
    sh.exec('git', 'submodule', 'update', '--init', quiet=True)

    if submodules:
        print('------ Submodule changes ------')
    else:
        print('No submodules found')
    for submodule in submodules:
        # Special warning about API breakages
        # (for 0.x versions, a minor bump counts as a major change)
        major_change = False
        if submodule['current_tag']['num_tuple'][0] == 0:
            if submodule['prev_tag']['num_tuple'][1] != submodule['current_tag']['num_tuple'][1]:
                major_change = True
        elif submodule['prev_tag']['num_tuple'][0] != submodule['current_tag']['num_tuple'][0]:
            major_change = True
        if major_change:
            print('MAJOR CHANGE: {} {} -> {}'.format(
                submodule['name'],
                submodule['prev_tag']['str'],
                submodule['current_tag']['str']))

        # Link to release notes
        # We can't just dump text because these are a github thing, not a git thing
        for tag in submodule['tags']:
            if tag == submodule['prev_tag']:
                break
            print(
                'https://github.com/awslabs/{}/releases/tag/{}'.format(submodule['name'], tag['str']))

    print('------ Package changes ------')
    print(package_changes)


class ReleaseNotes(Action):
    """Builder action wrapper around print_release_notes."""

    def is_main(self):
        # Top-level action: needs no setup/install actions run before it.
        return True

    def run(self, env):
        print_release_notes(env)
class Script(Action):
    """ A build step that runs a series of shell commands or python functions.

    Each command may be: a string (either a registered action name or a shell
    command), a list of shell args, an Action instance, or a callable taking
    the env and optionally returning child actions.
    """

    def __init__(self, commands, **kwargs):
        self.commands = commands
        self.name = kwargs.get('name', self.__class__.__name__)

    def run(self, env):
        sh = env.shell

        def _expand_vars(cmd):
            # Substitute ${variables} from the project config, then the env.
            if isinstance(cmd, str):
                cmd = replace_variables(cmd, env.config['variables'])
                cmd = replace_variables(cmd, env.variables)
            elif isinstance(cmd, list):
                cmd = [replace_variables(sub, env.config['variables']) for sub in cmd]
                cmd = [replace_variables(sub, env.variables) for sub in cmd]
            return cmd

        # Interpolate any variables
        self.commands = [_expand_vars(cmd) for cmd in self.commands]

        # Run each of the commands
        children = []
        for cmd in self.commands:
            # See if the string is actually a registered action name
            if isinstance(cmd, str):
                action_cls = Scripts.find_action(cmd)
                if action_cls:
                    cmd = action_cls()

            if isinstance(cmd, (str, list)):
                # Both forms run through the shell; a string is split on spaces.
                cmd_args = cmd.split(' ') if isinstance(cmd, str) else cmd
                result = sh.exec(*cmd_args)
                if result.returncode != 0:
                    print('Command failed, exiting')
                    sys.exit(12)
            elif isinstance(cmd, Action):
                Scripts.run_action(cmd, env)
            elif callable(cmd):
                # Callables may produce child actions to run afterwards.
                children += to_list(cmd(env))
            else:
                # FIX: original passed format args to print() without calling
                # .format(), so the message printed literal '{}' placeholders.
                print('Unknown script sub command: {}: {}'.format(type(cmd), cmd))
                sys.exit(4)
        return children

    def __str__(self):
        # Empty or explicitly-named scripts are identified by name alone.
        if len(self.commands) == 0:
            return '{}'.format(self.name)
        if self.name != self.__class__.__name__:
            return '{}'.format(self.name)

        cmds = []
        for cmd in self.commands:
            if isinstance(cmd, str):
                cmds.append(cmd)
            elif isinstance(cmd, list):
                cmds.append(' '.join(cmd))
            elif isinstance(cmd, Action):
                cmds.append(str(cmd))
            elif callable(cmd):
                cmds.append(cmd.__name__)
            else:
                cmds.append("UNKNOWN: {}".format(cmd))
        return '{}: (\n{}\n)'.format(self.name, '\n\t'.join(cmds))
It fails CI otherwise) 25 | import_pfx_arguments = [ 26 | # Powershell 7.3 introduced an issue where launching powershell from cmd would not set PSModulePath correctly. 27 | # As a workaround, we set `PSModulePath` to empty so powershell would automatically reset the PSModulePath to default. 28 | # More details: https://github.com/PowerShell/PowerShell/issues/18530 29 | "$env:PSModulePath = '';", 30 | "Import-PfxCertificate", 31 | "-FilePath", pfx_cert_path, 32 | "-CertStoreLocation", windows_certificate_folder] 33 | import_result = env.shell.exec("powershell.exe", import_pfx_arguments, check=True) 34 | 35 | # Get the certificate thumbprint from the output: 36 | import_pfx_output = str(import_result.output) 37 | # We know the Thumbprint will always be 40 characters long, so we can find it using that 38 | # TODO: Extract this using a better, more fool-proof method 39 | thumbprint = "" 40 | current_str = "" 41 | # The input comes as a string with some special characters still included, so we need to remove them! 
def create_pkcs11_environment(env, pkcs8key, pkcs8cert, ca_file):
    """Set up a SoftHSM2 token for PKCS#11 tests and export the
    AWS_TEST_PKCS11_* env vars the tests read.

    Best-effort: prints a warning and returns (leaving the tests disabled)
    when softhsm cannot be installed, found, or is too old.
    """
    # try to install softhsm
    try:
        # FIX: local-name typo (was `softhsm_install_acion`)
        softhsm_install_action = InstallPackages(['softhsm'])
        softhsm_install_action.run(env)
    except Exception:
        # FIX: was a bare `except:` which also swallowed KeyboardInterrupt.
        # Install failure is deliberately non-fatal: the tests are skipped.
        print("WARNING: softhsm could not be installed. PKCS#11 tests are disabled")
        return

    softhsm_lib = _find_softhsm_lib()
    if softhsm_lib is None:
        print("WARNING: libsofthsm2.so not found. PKCS#11 tests are disabled")
        return

    # put SoftHSM config file and token directory under the build dir.
    softhsm2_dir = os.path.join(env.build_dir, 'softhsm2')
    conf_path = os.path.join(softhsm2_dir, 'softhsm2.conf')
    token_dir = os.path.join(softhsm2_dir, 'tokens')
    env.shell.mkdir(token_dir)
    _setenv(env, 'SOFTHSM2_CONF', conf_path)
    with open(conf_path, 'w') as conf_file:
        conf_file.write(f"directories.tokendir = {token_dir}\n")

    # print SoftHSM version
    _exec_softhsm2_util(env, '--version')

    # bail out if softhsm is too old
    # 2.1.0 is a known offender that crashes on exit if C_Finalize() isn't called
    if _get_softhsm2_version(env) < (2, 2, 0):
        print("WARNING: SoftHSM2 installation is too old. PKCS#11 tests are disabled")
        return

    # create a token
    _exec_softhsm2_util(
        env,
        '--init-token',
        '--free',  # use any free slot
        '--label', 'my-test-token',
        '--pin', '0000',
        '--so-pin', '0000')

    # we need to figure out which slot the new token is in because:
    # 1) old versions of softhsm2-util make you pass --slot
    #    (instead of accepting --token like newer versions)
    # 2) newer versions of softhsm2-util reassign new tokens to crazy
    #    slot numbers (instead of simply using 0 like older versions)
    slot = _get_token_slots(env)[0]

    # add private key to token
    _exec_softhsm2_util(
        env,
        '--import', pkcs8key,
        '--slot', str(slot),
        '--label', 'my-test-key',
        '--id', 'BEEFCAFE',  # ID is hex (3203386110)
        '--pin', '0000')

    # for logging's sake, print the new state of things
    _exec_softhsm2_util(env, '--show-slots', '--pin', '0000')

    # set env vars for tests
    _setenv(env, 'AWS_TEST_PKCS11_LIB', softhsm_lib)
    _setenv(env, 'AWS_TEST_PKCS11_TOKEN_LABEL', 'my-test-token')
    _setenv(env, 'AWS_TEST_PKCS11_PIN', '0000')
    _setenv(env, 'AWS_TEST_PKCS11_PKEY_LABEL', 'my-test-key')
    _setenv(env, 'AWS_TEST_PKCS11_CERT_FILE', pkcs8cert)
    _setenv(env, 'AWS_TEST_PKCS11_CA_FILE', ca_file)


def _setenv(env, var, value):
    """
    Set environment variable now,
    and ensure the environment variable is set again when tests run
    """
    env.shell.setenv(var, value)
    env.project.config['test_env'][var] = value
def _find_softhsm_lib():
    """Return path to SoftHSM2 shared lib, or None if not found"""

    # note: not using `ldconfig --print-cache` to find it because
    # some installers put it in weird places where ldconfig doesn't look
    # (like in a subfolder under lib/)

    for lib_dir in ['lib64', 'lib']:  # search lib64 before lib
        for base_dir in ['/usr/local', '/usr', '/', ]:
            search_dir = os.path.join(base_dir, lib_dir)
            for root, dirs, files in os.walk(search_dir):
                for file_name in files:
                    if 'libsofthsm2.so' in file_name:
                        return os.path.join(root, file_name)
    return None


def _exec_softhsm2_util(env, *args, **kwargs):
    """Run softhsm2-util with the given args; raises on failure.

    `check=True` is applied by default (callers may override it).
    """
    # FIX: idiomatic membership test (was `if not 'check' in kwargs`).
    if 'check' not in kwargs:
        kwargs['check'] = True

    result = env.shell.exec('softhsm2-util', *args, **kwargs)

    # older versions of softhsm2-util (2.1.0 is a known offender)
    # return error code 0 and print the help if invalid args are passed.
    # This should be an error.
    #
    # invalid args can happen because newer versions of softhsm2-util
    # support more args than older versions, so what works on your
    # machine might not work on some ancient docker image.
    if 'Usage: softhsm2-util' in result.output:
        raise Exception('softhsm2-util failed')

    return result


def _get_token_slots(env):
    """Return array of IDs for slots with initialized tokens"""
    token_slot_ids = []

    output = _exec_softhsm2_util(env, '--show-slots', quiet=True).output

    # --- output looks like ---
    # Available slots:
    # Slot 0
    #     Slot info:
    #         ...
    #         Token present:    yes
    #     Token info:
    #         ...
    #         Initialized:      yes
    current_slot = None
    current_info_block = None
    for line in output.splitlines():
        # check for start of "Slot <N>" block
        m = re.match(r"Slot ([0-9]+)", line)
        if m:
            current_slot = int(m.group(1))
            current_info_block = None
            continue

        if current_slot is None:
            continue

        # check for start of next indented block, like "Token info"
        m = re.match(r"    ([^ ].*)", line)
        if m:
            current_info_block = m.group(1)
            continue

        if current_info_block is None:
            continue

        # if we're in token block, check for "Initialized: yes"
        if "Token info" in current_info_block:
            if re.match(r" *Initialized: *yes", line):
                token_slot_ids.append(current_slot)

    return token_slot_ids
189 | # Initialized: yes 190 | current_slot = None 191 | current_info_block = None 192 | for line in output.splitlines(): 193 | # check for start of "Slot " block 194 | m = re.match(r"Slot ([0-9]+)", line) 195 | if m: 196 | current_slot = int(m.group(1)) 197 | current_info_block = None 198 | continue 199 | 200 | if current_slot is None: 201 | continue 202 | 203 | # check for start of next indented block, like "Token info" 204 | m = re.match(r" ([^ ].*)", line) 205 | if m: 206 | current_info_block = m.group(1) 207 | continue 208 | 209 | if current_info_block is None: 210 | continue 211 | 212 | # if we're in token block, check for "Initialized: yes" 213 | if "Token info" in current_info_block: 214 | if re.match(r" *Initialized: *yes", line): 215 | token_slot_ids.append(current_slot) 216 | 217 | return token_slot_ids 218 | 219 | 220 | def _get_softhsm2_version(env): 221 | output = _exec_softhsm2_util(env, '--version').output 222 | match = re.match(r'([0-9+])\.([0-9]+).([0-9]+)', output) 223 | return (int(match.group(1)), int(match.group(2)), int(match.group(3))) 224 | 225 | ################################################################################ 226 | -------------------------------------------------------------------------------- /builder/actions/setup_event_stream_echo_server.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | from builder.core.action import Action 5 | import atexit 6 | import os 7 | import subprocess 8 | 9 | """ 10 | A builder action used by device SDK repositories to set up an event stream echo server for CI testing. 
11 | """ 12 | 13 | 14 | class SetupEventStreamEchoServer(Action): 15 | """ Set up an event stream echo server for testing eventstream bindings """ 16 | 17 | def __init__(self): 18 | pass 19 | 20 | def _build_and_run_eventstream_echo_server(self, env): 21 | java_sdk_dir = None 22 | 23 | try: 24 | env.shell.exec(["mvn", "--version"], check=True) 25 | 26 | # maven is installed, so this is a configuration we can start an event stream echo server 27 | java_sdk_dir = env.shell.mktemp() 28 | print(f"******************Cloning java device sdk into {java_sdk_dir}") 29 | 30 | env.shell.exec(["git", "clone", "https://github.com/aws/aws-iot-device-sdk-java-v2"], 31 | working_dir=java_sdk_dir, check=True) 32 | 33 | sdk_dir = os.path.join(java_sdk_dir, "aws-iot-device-sdk-java-v2", "sdk") 34 | env.shell.pushd(sdk_dir) 35 | 36 | try: 37 | # The EchoTest server is in test-only code 38 | env.shell.exec(["mvn", "test-compile"], check=True) 39 | 40 | env.shell.exec(["mvn", "dependency:build-classpath", "-Dmdep.outputFile=classpath.txt"], check=True) 41 | 42 | with open('classpath.txt', 'r') as file: 43 | classpath = file.read() 44 | 45 | test_class_path = os.path.join(sdk_dir, "target", "test-classes") 46 | target_class_path = os.path.join(sdk_dir, "target", "classes") 47 | directory_separator = os.pathsep 48 | 49 | echo_server_command = ["java", "-classpath", 50 | f"{test_class_path}{directory_separator}{target_class_path}{directory_separator}{classpath}", "software.amazon.awssdk.eventstreamrpc.echotest.EchoTestServiceRunner", "127.0.0.1", "8033"] 51 | 52 | print(f'Echo server command: {echo_server_command}') 53 | 54 | # bypass builder's exec wrapper since it doesn't allow for background execution 55 | proc = subprocess.Popen(echo_server_command) 56 | 57 | @atexit.register 58 | def _terminate_echo_server(): 59 | proc.terminate() 60 | proc.wait() 61 | 62 | env.shell.setenv("AWS_TEST_EVENT_STREAM_ECHO_SERVER_HOST", "127.0.0.1", quiet=False) 63 | 
env.shell.setenv("AWS_TEST_EVENT_STREAM_ECHO_SERVER_PORT", "8033", quiet=False) 64 | finally: 65 | env.shell.popd() 66 | 67 | except Exception as ex: 68 | print(f'Failed to set up event stream server: {ex}. Eventstream CI tests will not be run.') 69 | 70 | return java_sdk_dir 71 | 72 | def run(self, env): 73 | return self._build_and_run_eventstream_echo_server(env) 74 | -------------------------------------------------------------------------------- /builder/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-crt-builder/1c53c869ba916edd216e261eaf8e45be95967ec2/builder/core/__init__.py -------------------------------------------------------------------------------- /builder/core/action.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | 5 | class Action(object): 6 | """ A build step """ 7 | 8 | def is_main(self): 9 | """ Returns True if this action needs no external tasks run to set it up """ 10 | return False 11 | 12 | def run(self, env): 13 | pass 14 | 15 | def __str__(self): 16 | return self.__class__.__name__ 17 | -------------------------------------------------------------------------------- /builder/core/api.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
3 | 4 | from builder.core.vmod import VirtualModule 5 | from builder.core.shell import Shell 6 | from builder.core.env import Env 7 | from builder.core.action import Action 8 | from builder.core.scripts import Scripts 9 | from builder.core.project import Project, Import 10 | from builder.actions.cmake import CMakeBuild, CTestRun 11 | from builder.actions.git import DownloadSource, DownloadDependencies 12 | from builder.actions.install import InstallPackages, InstallCompiler 13 | from builder.actions.script import Script 14 | from builder.core.toolchain import Toolchain 15 | from builder.core import host 16 | from builder.core import util 17 | from builder.actions.setup_cross_ci_crt_environment import SetupCrossCICrtEnvironment 18 | from builder.actions.setup_event_stream_echo_server import SetupEventStreamEchoServer 19 | 20 | 21 | class Host(object): 22 | current_os = host.current_os 23 | current_arch = host.current_arch 24 | current_host = host.current_host 25 | 26 | 27 | class Util(object): 28 | where = util.where 29 | run_command = util.run_command 30 | 31 | 32 | class Builder(VirtualModule): 33 | """ The interface available to scripts that define projects, builds, actions, or configuration """ 34 | 35 | Shell = Shell 36 | Env = Env 37 | Action = Action 38 | 39 | Project = Project 40 | Import = Import 41 | Toolchain = Toolchain 42 | 43 | Host = Host 44 | 45 | # Actions 46 | CMakeBuild = CMakeBuild 47 | CTestRun = CTestRun 48 | DownloadDependencies = DownloadDependencies 49 | DownloadSource = DownloadSource 50 | InstallTools = InstallPackages # backward compat, deprecated 51 | InstallPackages = InstallPackages 52 | InstallCompiler = InstallCompiler 53 | Script = Script 54 | SetupCrossCICrtEnvironment = SetupCrossCICrtEnvironment 55 | SetupEventStreamEchoServer = SetupEventStreamEchoServer 56 | 57 | Util = Util 58 | -------------------------------------------------------------------------------- /builder/core/env.py: 
-------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | import os 5 | import subprocess 6 | import sys 7 | 8 | from builder.actions.git import DownloadSource 9 | from builder.core.project import Project 10 | from builder.core.shell import Shell 11 | 12 | 13 | class Env(object): 14 | """ Encapsulates the environment in which the build is running """ 15 | 16 | def __init__(self, config=None): 17 | if config is None: 18 | config = {} 19 | 20 | # DEFAULTS 21 | self.dryrun = False # overwritten by config 22 | 23 | env = self 24 | 25 | class Variables(dict): 26 | def __setitem__(self, item, value): 27 | super().__setitem__(item, value) 28 | env._publish_variable(item, value) 29 | self.variables = Variables() 30 | 31 | # OVERRIDES: copy incoming config, overwriting defaults 32 | for key, val in config.items(): 33 | setattr(self, key, val) 34 | 35 | # default the branch to whatever the current dir+git says it is 36 | self.branch = getattr(self, 'branch', None) or self._get_git_branch() 37 | 38 | # make sure the shell is initialized 39 | if not hasattr(self, 'shell'): 40 | self.shell = Shell(self.dryrun) 41 | 42 | # build environment set up 43 | self.launch_dir = os.path.abspath(self.shell.cwd()) 44 | 45 | Project.search_dirs = [ 46 | self.launch_dir, 47 | ] 48 | 49 | # default the project to whatever can be found, or convert 50 | # project from a name to a Project 51 | if not getattr(self, 'project', None): 52 | self.project = Project.default_project() 53 | 54 | # Project provided via args, locate it 55 | if self.args.project: 56 | project_name = self.args.project 57 | 58 | # see if the project is a path, if so, split it and give the path as a hint 59 | hints = [] 60 | parts = project_name.split(os.path.sep) 61 | if len(parts) > 1: 62 | project_path = os.path.abspath(os.path.join(*parts)) 63 | hints += [project_path] 64 | 
project_name = parts[-1] 65 | 66 | # Ensure that the specified project exists, this may return a ref or the project if 67 | # it is present on disk 68 | project = Project.find_project(project_name, hints=hints) 69 | if not project.path: # got a ref 70 | print('Project {} could not be found locally, downloading'.format(project.name)) 71 | DownloadSource(project=project, branch=self.branch, path='.').run(self) 72 | 73 | # Now that the project is downloaded, look it up again 74 | project = Project.find_project(project.name, hints=[os.path.abspath('.')]) 75 | assert project.resolved() 76 | 77 | self.project = project 78 | 79 | if not self.project or not self.project.resolved(): 80 | return 81 | 82 | # Build the config object 83 | self.project.use_variant(self.variant) 84 | self.config = self.project.get_config(self.spec, self.args.cli_config) 85 | 86 | # Once initialized, switch to the source dir before running actions 87 | self.root_dir = os.path.abspath(self.project.path) 88 | self.variables['root_dir'] = self.root_dir 89 | self.shell.cd(self.root_dir) 90 | 91 | # Allow these to be overridden by the project, and relative to source_dir if not absolute paths 92 | build_dir = self.config.get('build_dir', os.path.join(self.root_dir, 'build')) 93 | self.build_dir = os.path.abspath(build_dir) 94 | self.variables['build_dir'] = self.build_dir 95 | 96 | deps_dir = self.config.get('deps_dir', os.path.join(self.build_dir, 'deps')) 97 | self.deps_dir = os.path.abspath(deps_dir) 98 | self.variables['deps_dir'] = self.deps_dir 99 | 100 | install_dir = self.config.get('install_dir', os.path.join(self.build_dir, 'install')) 101 | self.install_dir = os.path.abspath(install_dir) 102 | self.variables['install_dir'] = self.install_dir 103 | 104 | # modify environment so that downstream tests can find any shared libs we may build 105 | if sys.platform == 'win32': 106 | self.shell.addpathenv('PATH', os.path.abspath(os.path.join(self.install_dir, 'bin'))) 107 | else: 108 | 
self.shell.addpathenv('LD_LIBRARY_PATH', os.path.abspath(os.path.join(self.install_dir, 'lib64'))) 109 | self.shell.addpathenv('LD_LIBRARY_PATH', os.path.abspath(os.path.join(self.install_dir, 'lib'))) 110 | 111 | print('Root directory: {}'.format(self.root_dir)) 112 | print('Build directory: {}'.format(self.build_dir)) 113 | 114 | Project.search_dirs += [ 115 | self.build_dir, 116 | self.root_dir, 117 | self.deps_dir, 118 | ] 119 | 120 | # set up build environment 121 | if os.path.exists(self.build_dir): 122 | self.shell.rm(self.build_dir) 123 | self.shell.mkdir(self.build_dir) 124 | 125 | def _publish_variable(self, var, value): 126 | Project._publish_variable(var, value) 127 | 128 | @staticmethod 129 | def _get_git_branch(): 130 | travis_pr_branch = os.environ.get("TRAVIS_PULL_REQUEST_BRANCH") 131 | if travis_pr_branch: 132 | print("Found branch:", travis_pr_branch) 133 | return travis_pr_branch 134 | 135 | # NOTE: head_ref only set for pull_request events 136 | # see: https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables 137 | github_head_ref = os.environ.get("GITHUB_HEAD_REF") 138 | github_ref = os.environ.get("GITHUB_REF") 139 | if github_head_ref: 140 | # if we are triggered from a PR then we are in a detached head state (e.g. 
`refs/pull/:prNumber/merge`) 141 | # and we need to grab the branch being merged from 142 | # see: https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request 143 | branch = github_head_ref 144 | print("Found github ref for PR from: {}".format(branch)) 145 | return branch 146 | elif github_ref: 147 | origin_str = "refs/heads/" 148 | if github_ref.startswith(origin_str): 149 | branch = github_ref[len(origin_str):] 150 | print("Found github ref: {}".format(branch)) 151 | return branch 152 | 153 | try: 154 | branch_output = subprocess.check_output( 155 | ["git", "branch", "-a", "--contains", "HEAD"]).decode("utf-8") 156 | branches_unfiltered = [branch.strip() for branch in branch_output.splitlines()] 157 | print("Found branches:", branches_unfiltered) 158 | 159 | branches = [] 160 | star_branch = None 161 | for line in branches_unfiltered: 162 | branch = line.lstrip('*').strip() 163 | 164 | # eliminate candidates like "(no branch)" and "(HEAD detached at 1dd6804)" 165 | if branch.startswith('('): 166 | continue 167 | 168 | if line.startswith('*'): 169 | star_branch = branch 170 | 171 | branches.append(branch) 172 | 173 | # if git branch says we're on a branch, that's it 174 | if star_branch: 175 | print('Working in branch: {}'.format(star_branch)) 176 | return star_branch 177 | 178 | # pick the first one (it should be the only one, if it's a fresh sync) 179 | for branch in branches: 180 | origin_str = "remotes/origin/" 181 | if branch.startswith(origin_str): 182 | branch = branch[len(origin_str):] 183 | 184 | print('Working in branch: {}'.format(branch)) 185 | return branch 186 | except: 187 | print("Current directory ({}) is not a git repository".format(os.getcwd())) 188 | 189 | # git symbolic-ref --short HEAD 190 | return 'main' 191 | -------------------------------------------------------------------------------- /builder/core/host.py: -------------------------------------------------------------------------------- 1 | # Copyright 
Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | from builder.core.data import ARCHS, HOSTS, PKG_TOOLS 5 | 6 | import os 7 | import re 8 | import sys 9 | 10 | from functools import lru_cache 11 | 12 | 13 | def current_os(): 14 | if sys.platform == 'win32': 15 | return 'windows' 16 | elif sys.platform == 'darwin': 17 | return 'macos' 18 | elif 'linux' in sys.platform or sys.platform in ('cygwin', 'msys'): 19 | return 'linux' 20 | elif sys.platform.startswith('freebsd'): 21 | return 'freebsd' 22 | elif sys.platform.startswith('openbsd'): 23 | return 'openbsd' 24 | return 'UNKNOWN' 25 | 26 | 27 | def current_arch(): 28 | if current_os() == 'linux' or current_os() == 'macos': 29 | machine_id = os.uname()[4] 30 | m = re.match(r'^(aarch64|armv[6-8]|arm64)', machine_id.strip()) 31 | if m: 32 | arch = m.group(1) 33 | if arch == 'aarch64': 34 | arch = 'armv8' 35 | return arch 36 | return 'x64' if sys.maxsize > 2**32 else 'x86' 37 | 38 | 39 | def current_platform(): 40 | return '{}-{}'.format(current_os(), current_arch()) 41 | 42 | 43 | def normalize_arch(arch): 44 | return ARCHS[arch]['arch'] 45 | 46 | 47 | def normalize_target(target): 48 | """ convert target into canonical os and arch """ 49 | assert '-' in target 50 | os, arch = target.split('-') 51 | arch = normalize_arch(arch) 52 | return '{}-{}'.format(os, arch) 53 | 54 | 55 | def _file_contains(path, search): 56 | if os.path.isfile(path): 57 | with open(path) as f: 58 | line = f.readline() 59 | while line: 60 | if search in line: 61 | return True 62 | line = f.readline() 63 | return False 64 | 65 | 66 | @lru_cache(1) 67 | def current_host(): 68 | """ Between sys.platform or linux distro identifiers, determine the specific os """ 69 | 70 | def _discover_host(): 71 | platform = current_os() 72 | if platform == 'linux': 73 | # Note: that AL2 and AL2023 have the same substring. Check for AL2023 explicitly. 
74 | # And also check that AL2 has "2 (", which is common to all base distributions of AL2 75 | if _file_contains('/etc/system-release', 'Amazon Linux release 2023'): 76 | return 'al2023' 77 | if _file_contains('/etc/system-release', 'Amazon Linux release 2 ('): 78 | return 'al2' 79 | if _file_contains('/etc/system-release', 'Bare Metal') or _file_contains('/etc/system-release', 'Amazon Linux AMI'): 80 | return 'al2012' 81 | if _file_contains('/etc/redhat-release', 'CentOS release 5.'): 82 | if os.path.exists('/opt/python/cp27-cp27m'): 83 | return 'manylinux' 84 | return 'centos' 85 | if _file_contains('/etc/redhat-release', 'CentOS Linux release 7.'): 86 | if os.path.exists('/opt/python/cp39-cp39'): 87 | return 'manylinux' 88 | return 'centos' 89 | if _file_contains('/etc/lsb-release', 'Ubuntu'): 90 | return 'ubuntu' 91 | if _file_contains('/etc/os-release', 'Debian'): 92 | return 'debian' 93 | if _file_contains('/etc/os-release', 'Alpine Linux'): 94 | if os.path.exists('/opt/python/cp39-cp39'): 95 | return 'musllinux' 96 | return 'alpine' 97 | if _file_contains('/etc/os-release', 'Raspbian'): 98 | return 'raspbian' 99 | if _file_contains('/etc/system-release', 'Fedora'): 100 | return 'fedora' 101 | if _file_contains('/etc/os-release', 'openSUSE'): 102 | return 'opensuse' 103 | if _file_contains('/etc/os-release', 'Red Hat Enterprise Linux'): 104 | return 'rhel' 105 | if _file_contains('/etc/os-release', 'OpenWrt'): 106 | return 'openwrt' 107 | return 'linux' 108 | else: 109 | return platform 110 | return _discover_host() 111 | 112 | 113 | def package_tool(host=current_host()): 114 | host_info = HOSTS.get(host, {}) 115 | return host_info['pkg_tool'] 116 | -------------------------------------------------------------------------------- /builder/core/scripts.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 

import importlib
import importlib.util
import glob
import os
import sys


# Late-bound references to classes from builder.core; populated by
# _import_dynamic_classes() to avoid a cyclic import at module load time.
Action = None
Project = None
Import = None


def _import_dynamic_classes():
    """Resolve Action/Project/Import lazily (avoids cyclic dependency)."""
    global Action
    global Project
    global Import

    import builder.core.project as project
    import builder.core.action as action
    Project = getattr(project, 'Project')
    Import = getattr(project, 'Import')
    Action = getattr(action, 'Action')


def _find_all_subclasses(cls):
    """ Recursively find all subclasses """
    subclasses = set(cls.__subclasses__())
    for sub in cls.__subclasses__():
        subclasses = subclasses | _find_all_subclasses(sub)
    return subclasses


def _get_all_dynamic_classes():
    """Return every known subclass of Action, Project, and Import."""
    _import_dynamic_classes()
    all_classes = set(
        _find_all_subclasses(Action) |
        _find_all_subclasses(Project) |
        _find_all_subclasses(Import))
    return all_classes


def _normalize_name(name):
    # 'My-Cool-Action' -> 'mycoolaction', for case/dash-insensitive lookup
    return name.replace('-', '').lower()


def _find_classes(parent):
    """Return all loaded subclasses of the given base class."""
    _import_dynamic_classes()
    classes = _find_all_subclasses(parent)
    return classes


def _find_subclass(parent, name):
    """Find a subclass of parent by normalized name.

    The parent class name is treated as an optional suffix, so 'git' matches
    a class named GitImport when parent is Import. Returns None when nothing
    matches.
    """
    name = _normalize_name(name)
    all_classes = _find_classes(parent)
    parent_name = parent.__name__.lower()
    for cls in all_classes:
        cls_name = cls.__name__.lower()
        if cls_name.endswith(parent_name):
            cls_name = cls_name.replace(
                "{}".format(parent_name), "")
        if name == cls_name:
            return cls
    return None


class Scripts(object):
    """ Manages loading, context, and running of per-project scripts """

    # Must cache all classes with a reference here, or the GC will murder them
    all_classes = set()

    @staticmethod
    def load(path='.'):
        """ Loads all scripts from ${path}/.builder/**/*.py to make their classes available """

        if len(Scripts.all_classes) == 0:
            Scripts.all_classes = _get_all_dynamic_classes()

        # Load any classes from path
        path = os.path.abspath(os.path.join(path, '.builder'))
        if os.path.isdir(path):
            print('Loading scripts from {}'.format(path))
            scripts = glob.glob(os.path.join(path, '*.py'))
            scripts += glob.glob(os.path.join(path, '**', '*.py'))

            for script in scripts:
                if not script.endswith('.py'):
                    continue

                # Ensure that the import path includes the directory the script is in
                # so that relative imports work
                script_dir = os.path.dirname(script)
                if script_dir not in sys.path:
                    sys.path.append(script_dir)
                print("Importing {}".format(os.path.abspath(script)), flush=True)

                name = os.path.split(script)[1].split('.')[0]
                spec = importlib.util.spec_from_file_location(name, script)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                # Must invalidate caches or sometimes the loaded classes won't be found
                # See: https://docs.python.org/3/library/importlib.html#importlib.invalidate_caches
                importlib.invalidate_caches()

            # Report newly loaded classes
            classes = frozenset(_get_all_dynamic_classes())
            new_classes = classes.difference(Scripts.all_classes)
            if new_classes:
                print("Imported {}".format(
                    ', '.join([c.__name__ for c in new_classes])))
                Scripts.all_classes.update(new_classes)

    @staticmethod
    def find_action(name):
        """ Finds any loaded action class by name and returns it """
        _import_dynamic_classes()
        return _find_subclass(Action, name)

    @staticmethod
    def find_project(name):
        """ Finds any loaded project class by name and returns it """
        _import_dynamic_classes()
        return _find_subclass(Project, name)

    @staticmethod
    def find_import(name):
        """ Finds any loaded import class by name and returns it """
        _import_dynamic_classes()
        return _find_subclass(Import, name)

    @staticmethod
    def run_action(action, env):
        """ Runs an action, and any generated child actions recursively """
        action_type = type(action)
        if action_type is str:
            try:
                action_cls = Scripts.find_action(action)
                action = action_cls()
            except Exception:
                # BUG FIX: this previously called Scripts._find_actions(),
                # which does not exist, so an unknown action name crashed
                # with AttributeError instead of listing the alternatives.
                print("Unable to find action {} to run".format(action))
                all_actions = [a.__name__ for a in _find_classes(Action)]
                print("Available actions: \n\t{}".format(
                    '\n\t'.join(all_actions)))
                sys.exit(2)

        print("Running: {}".format(action), flush=True)
        children = action.run(env)
        if children:
            if not isinstance(children, list) and not isinstance(children, tuple):
                children = [children]
            for child in children:
                Scripts.run_action(child, env)
        print("Finished: {}".format(action), flush=True)


# ---------------------------------------------------------------------------
# builder/core/shell.py
# ---------------------------------------------------------------------------
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
3 | 4 | import json 5 | import os 6 | import shutil 7 | import tempfile 8 | 9 | from builder.core.host import current_os 10 | from builder.core import util 11 | 12 | 13 | class Shell(object): 14 | """ Virtual shell that abstracts away dry run and tracks/logs state """ 15 | 16 | def __init__(self, dryrun=False): 17 | # Used in dry-run builds to track simulated working directory 18 | self._cwd = os.getcwd() 19 | # pushd/popd stack 20 | self.dir_stack = [] 21 | self.env_stack = [] 22 | self.dryrun = dryrun 23 | self.platform = current_os() 24 | 25 | def _cd(self, directory): 26 | if self.dryrun: 27 | if os.path.isabs(directory) or directory.startswith('$'): 28 | self._cwd = directory 29 | else: 30 | self._cwd = os.path.join(self._cwd, directory) 31 | else: 32 | os.chdir(directory) 33 | 34 | def cd(self, directory, **kwargs): 35 | """ # Helper to run chdir regardless of dry run status """ 36 | if not kwargs.get('quiet', False): 37 | util.log_command("cd", directory) 38 | self._cd(directory) 39 | 40 | def pushd(self, directory, **kwargs): 41 | """ Equivalent to bash/zsh pushd """ 42 | if not kwargs.get('quiet', False): 43 | util.log_command("pushd", directory) 44 | self.dir_stack.append(self.cwd()) 45 | self._cd(directory) 46 | 47 | def popd(self, **kwargs): 48 | """ Equivalent to bash/zsh popd """ 49 | if len(self.dir_stack) > 0: 50 | if not kwargs.get('quiet', False): 51 | util.log_command("popd", self.dir_stack[-1]) 52 | self._cd(self.dir_stack[-1]) 53 | self.dir_stack.pop() 54 | 55 | def mkdir(self, directory, **kwargs): 56 | """ Equivalent to mkdir -p $dir """ 57 | if not kwargs.get('quiet', False): 58 | util.log_command("mkdir", "-p", directory) 59 | if not self.dryrun: 60 | os.makedirs(directory, exist_ok=True) 61 | 62 | def mktemp(self, **kwargs): 63 | """ Makes and returns the path to a temp directory """ 64 | if self.dryrun: 65 | return os.path.expandvars("$TEMP/build") 66 | 67 | return tempfile.mkdtemp() 68 | 69 | def cwd(self): 70 | """ Returns current 
working directory, accounting for dry-runs """ 71 | if self.dryrun: 72 | return self._cwd 73 | else: 74 | return os.getcwd() 75 | 76 | def setenv(self, var, value, is_secret=False, **kwargs): 77 | """ Set an environment variable """ 78 | if not kwargs.get('quiet', False): 79 | if is_secret: 80 | util.log_command(["export", "{}=***".format(var)]) 81 | else: 82 | util.log_command(["export", "{}={}".format(var, value)]) 83 | if not self.dryrun: 84 | os.environ[var] = str(value) 85 | 86 | def getenv(self, var, default=None): 87 | """ Get an environment variable """ 88 | try: 89 | return os.environ[var] 90 | except: 91 | return default 92 | 93 | def addpathenv(self, var, path, **kwargs): 94 | """Add a path to an environment variable""" 95 | prev = os.getenv(var) 96 | if prev: 97 | value = prev + os.pathsep + path 98 | else: 99 | value = path 100 | self.setenv(var, value, **kwargs) 101 | 102 | def pushenv(self, **kwargs): 103 | """ Store the current environment on a stack, for restoration later """ 104 | if not kwargs.get('quiet', False): 105 | util.log_command(['pushenv']) 106 | self.env_stack.append(dict(os.environ)) 107 | 108 | def popenv(self, **kwargs): 109 | """ Restore the environment to the state on the top of the stack """ 110 | if not kwargs.get('quiet', False): 111 | util.log_command(['popenv']) 112 | env = self.env_stack.pop() 113 | # clear out values that won't be overwritten 114 | for name, value in dict(os.environ).items(): 115 | if name not in env: 116 | del os.environ[name] 117 | # write the old env 118 | for name, value in env.items(): 119 | os.environ[name] = value 120 | 121 | def rm(self, path, **kwargs): 122 | """ Remove a file or directory """ 123 | if not kwargs.get('quiet', False): 124 | util.log_command(['rm', '-rf', path]) 125 | if not self.dryrun: 126 | try: 127 | shutil.rmtree(path) 128 | except Exception as e: 129 | print("Failed to delete dir {}: {}".format(path, e)) 130 | 131 | def where(self, exe, path=None, resolve_symlinks=True, 
**kwargs): 132 | """ Platform agnostic `where executable` command """ 133 | return util.where(exe, path, resolve_symlinks) 134 | 135 | def exec(self, *command, check=False, quiet=False, always=False, retries=0, working_dir=None): 136 | """ 137 | Executes a shell command, or just logs it for dry runs 138 | Arguments: 139 | check: If true, raise an exception when execution fails 140 | retries: (default 0) How many times to retry the command, useful for network commands 141 | quiet: Do not produce any output 142 | always: If true, run for real in a dryrun 143 | working_dir: If set, the working directory to run the command in 144 | """ 145 | prev_dryrun = self.dryrun 146 | if always: 147 | self.dryrun = False 148 | 149 | result = util.run_command(*command, check=check, quiet=quiet, dryrun=self.dryrun, 150 | retries=retries, working_dir=working_dir) 151 | self.dryrun = prev_dryrun 152 | return result 153 | 154 | def get_secret(self, secret_id, key=None): 155 | """get string from secretsmanager""" 156 | 157 | # NOTE: using AWS CLI instead of boto3 because we know CLI is already 158 | # installed wherever builder is run. Once upon a time we tried using 159 | # boto3 by installing it while the builder was running but this didn't 160 | # work in some rare scenarios. 161 | 162 | cmd = ['aws', '--region', 'us-east-1', 'secretsmanager', 'get-secret-value', 163 | '--secret-id', secret_id] 164 | # NOTE: log command args, but use "quiet" mode so that output isn't printed. 
165 | # we don't want secrets leaked to the build log 166 | util.log_command(cmd) 167 | result = self.exec(*cmd, check=True, quiet=True) 168 | secret_value = json.loads(result.output) 169 | if key is not None: 170 | screct_pairs = json.loads(secret_value['SecretString']) 171 | return screct_pairs[key] 172 | else: 173 | return secret_value['SecretString'] 174 | -------------------------------------------------------------------------------- /builder/core/spec.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | import os 5 | 6 | from builder.core.data import * 7 | from builder.core.host import current_host, current_os, current_arch, normalize_arch 8 | 9 | 10 | def validate_spec(build_spec): 11 | 12 | assert build_spec.host in HOSTS, "Host name {} is invalid".format( 13 | build_spec.host) 14 | assert build_spec.target in TARGETS, "Target {} is invalid".format( 15 | build_spec.target) 16 | 17 | assert build_spec.arch in ARCHS, "Architecture {} is invalid".format( 18 | build_spec.target) 19 | 20 | assert build_spec.compiler in COMPILERS, "Compiler {} is invalid".format( 21 | build_spec.compiler) 22 | compiler = COMPILERS[build_spec.compiler] 23 | 24 | assert build_spec.compiler_version in compiler['versions'], "Compiler version {} is invalid for compiler {}".format( 25 | build_spec.compiler_version, build_spec.compiler) 26 | 27 | supported_hosts = compiler['hosts'] 28 | assert build_spec.host in supported_hosts or current_os() in supported_hosts, "Compiler {} does not support host {}".format( 29 | build_spec.compiler, build_spec.host) 30 | 31 | supported_targets = compiler['targets'] 32 | assert build_spec.target in supported_targets, "Compiler {} does not support target {}".format( 33 | build_spec.compiler, build_spec.target) 34 | 35 | 36 | class BuildSpec(object): 37 | """ Refers to a specific build permutation, 
gets converted into a toolchain """ 38 | 39 | def __init__(self, **kwargs): 40 | for slot in ('host', 'target', 'arch', 'compiler', 'compiler_version'): 41 | setattr(self, slot, 'default') 42 | self.downstream = False 43 | 44 | spec = kwargs.get('spec', None) 45 | if spec: 46 | if spec.startswith('default'): # default or default(-{variant}) 47 | _, *rest = spec.split('-') 48 | elif not '-' in spec: # just a variant 49 | rest = [spec] 50 | else: # Parse the spec from a single string 51 | self.host, self.compiler, self.compiler_version, self.target, self.arch, * \ 52 | rest = spec.split('-') 53 | 54 | for variant in ('downstream',): 55 | if variant in rest: 56 | setattr(self, variant, True) 57 | else: 58 | setattr(self, variant, False) 59 | 60 | # Pull out individual fields. Note this is not in an else to support overriding at construction time 61 | for slot in ('host', 'target', 'arch', 'compiler', 'compiler_version', 'downstream'): 62 | if slot in kwargs and kwargs[slot]: 63 | setattr(self, slot, kwargs[slot]) 64 | 65 | # Convert a target tuple into its component parts 66 | if '-' in self.target: 67 | self.target, self.arch = self.target.split('-') 68 | 69 | # Convert defaults to be based on running environment 70 | if self.host == 'default': 71 | self.host = current_host() 72 | if self.target == 'default': 73 | self.target = current_os() 74 | if self.arch == 'default': 75 | self.arch = current_arch() 76 | else: 77 | self.arch = normalize_arch(self.arch) 78 | 79 | self.name = '-'.join([self.host, self.compiler, 80 | self.compiler_version, self.target, self.arch]) 81 | if self.downstream: 82 | self.name += "-downstream" 83 | 84 | validate_spec(self) 85 | 86 | def __str__(self): 87 | return self.name 88 | 89 | def __repr__(self): 90 | return self.name 91 | 92 | def update_compiler(self, compiler, compiler_version): 93 | self.compiler = compiler 94 | self.compiler_version = compiler_version 95 | 96 | self.name = '-'.join([self.host, self.compiler, 97 | 
# Registry of every virtual module created so far, keyed by module name.
# Populated by VirtualModule.create_module(); consulted by Finder/VirtualLoader.
_virtual_modules = dict()


class VirtualModuleMetaclass(type):
    """Metaclass that turns each VirtualModule subclass into an importable
    virtual module: defining the class registers a module named after the
    class (or its ``__module_name__``) whose attributes mirror the class's.
    """

    def __init__(cls, name, bases, attrs):
        # Initialize the class
        super(VirtualModuleMetaclass, cls).__init__(name, bases, attrs)

        # Do not register VirtualModule (the base class itself is not a module)
        if name == 'VirtualModule':
            return

        # Prefer an explicit __module_name__, fall back to the class name
        module_name = getattr(cls, '__module_name__', cls.__name__) or name
        module = VirtualModule.create_module(module_name)

        # Copy over class attributes onto the module object, skipping the
        # identity dunders that would corrupt the module's own metadata
        for key, value in attrs.items():
            if key in ('__name__', '__module_name__', '__module__', '__qualname__'):
                continue
            setattr(module, key, value)


class VirtualModule(metaclass=VirtualModuleMetaclass):
    """Base class for declaring virtual modules (allows ``import Builder``
    from .builder/*.py scripts). Subclassing is sufficient: the metaclass
    registers the module, and the Finder installed below resolves it.
    """

    class Finder(MetaPathFinder):
        """sys.meta_path finder that resolves names registered in
        _virtual_modules and defers to the normal machinery otherwise."""

        @staticmethod
        def find_spec(fullname, path, target=None):
            if fullname in _virtual_modules:
                return _virtual_modules[fullname].__spec__
            # Not one of ours: let the regular import system handle it
            return None

        @staticmethod
        def invalidate_caches():
            # No caches kept; present to satisfy the MetaPathFinder API.
            pass

    class VirtualLoader(Loader):
        """Loader that hands back the pre-built module object itself."""

        @staticmethod
        def create_module(spec):
            if spec.name not in _virtual_modules:
                # Returning None requests default module creation semantics
                return None

            return _virtual_modules[spec.name]

        @staticmethod
        def exec_module(module):
            # Nothing to execute: the module was fully populated at class
            # definition time. Just ensure it is registered in sys.modules.
            module_name = module.__name__
            if hasattr(module, '__spec__'):
                module_name = module.__spec__.name

            sys.modules[module_name] = module

    @staticmethod
    def create_module(name):
        """Create, register, and return an empty module object called *name*,
        wired to VirtualLoader through a fresh ModuleSpec."""
        module_cls = type(sys)           # the builtin module type
        spec_cls = type(sys.__spec__)    # importlib.machinery.ModuleSpec
        module = module_cls(name)
        setattr(module, '__spec__', spec_cls(
            name=name, loader=VirtualModule.VirtualLoader))
        _virtual_modules[name] = module
        return module


# Install the finder ahead of the standard finders so virtual modules win.
sys.meta_path.insert(0, VirtualModule.Finder)
# Shared build configuration for aws-lc: Go/Perl codegen and libssl are
# disabled because only libcrypto is consumed here.
config = {
    'targets': ['linux', 'android'],
    'test_steps': [],
    'build_tests': False,
    'cmake_args': ['-DDISABLE_GO=ON', '-DBUILD_LIBSSL=OFF', '-DDISABLE_PERL=ON']
}


class AWSLCImport(Import):
    """Import that locates, downloads if necessary, and builds aws-lc."""

    def __init__(self, **kwargs):
        # Force the canonical name. pop() also removes a present-but-falsy
        # 'name' (the previous `if kwargs.get('name'): del` left e.g. name=''
        # in kwargs, which would collide with the explicit name= below).
        kwargs.pop('name', None)
        super().__init__(
            library=True,
            account='aws',
            name='aws-lc',
            config=config,
            **kwargs)

    def pre_build(self, env):
        """Resolve self.path to an aws-lc source tree, downloading it into
        env.deps_dir when no existing checkout is found."""
        # Search for an existing aws-lc directory under the deps dir.
        if not hasattr(self, 'path'):
            for root, dirs, files in os.walk(env.deps_dir):
                match = next((d for d in dirs if d.endswith('aws-lc')), None)
                if match is not None:
                    self.path = os.path.join(root, match)
                    # Stop the whole walk at the first match; the previous
                    # bare `break` only exited the inner loop, letting later
                    # walk iterations overwrite self.path.
                    break
        # No aws-lc directory anywhere: download to the deps dir now.
        if not hasattr(self, 'path'):
            self.path = os.path.join(env.deps_dir, 'aws-lc')
            DownloadSource(project=Project.find_project(self.name), path=env.deps_dir).run(env)

    def build(self, env):
        """Build aws-lc out of the tree resolved by pre_build()."""
        return Project.build(Project.find_project(self.name, [self.path]), env)


class AWSLCProject(Project):
    """Project definition for building aws-lc directly."""

    def __init__(self, **kwargs):
        # See AWSLCImport.__init__: drop any caller-supplied name outright.
        kwargs.pop('name', None)
        super().__init__(
            account='aws',
            name='aws-lc',
            **config,
            **kwargs)

    def cmake_args(self, env):
        """Return cmake args, working around old-GCC assembler limits."""
        # Guard against an unset compiler_version (None would raise here).
        if env.spec.compiler == 'gcc' and (env.spec.compiler_version or '').startswith('4.'):
            # Disable AVX512 on old GCC versions for aws-lc as they dont support AVX512 instructions used by aws-lc
            return super().cmake_args(env) + ['-DMY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX=ON']

        return super().cmake_args(env)
3 | 4 | from builder.core.host import current_os 5 | from builder.core.project import Import 6 | 7 | from pathlib import Path 8 | import os 9 | 10 | 11 | class Dockcross(Import): 12 | def __init__(self, **kwargs): 13 | super().__init__( 14 | compiler=True, 15 | config={ 16 | 'targets': ['linux'], 17 | }, 18 | **kwargs) 19 | self.installed = False 20 | 21 | def resolved(self): 22 | return True 23 | 24 | def install(self, env): 25 | if self.installed: 26 | return 27 | 28 | sh = env.shell 29 | toolchain = env.toolchain 30 | 31 | sudo = env.config.get('sudo', current_os() == 'linux') 32 | sudo = ['sudo'] if sudo else [] 33 | 34 | print( 35 | 'Installing cross-compile via dockcross for {}'.format(toolchain.platform)) 36 | cross_compile_platform = env.config.get( 37 | 'cross_compile_platform', toolchain.platform) 38 | result = sh.exec( 39 | 'docker', 'run', 'dockcross/{}'.format(cross_compile_platform), quiet=True, check=True) 40 | # Strip off any output from docker itself 41 | output, shebang, script = result.output.partition('#!') 42 | script = shebang + script 43 | print(output) 44 | assert result.returncode == 0 45 | 46 | dockcross = os.path.abspath(os.path.join( 47 | env.build_dir, 'dockcross-{}'.format(cross_compile_platform))) 48 | Path(dockcross).touch(0o755) 49 | with open(dockcross, "w+t") as f: 50 | f.write(script) 51 | sh.exec('chmod', 'a+x', dockcross) 52 | 53 | # Write out build_dir/dockcross.env file to init the dockcross env with 54 | # other code can add to this 55 | dockcross_env = os.path.join(env.build_dir, 'dockcross.env') 56 | with open(dockcross_env, "w+") as f: 57 | f.write('#env for dockcross\n') 58 | toolchain.env_file = dockcross_env 59 | toolchain.shell_env = [ 60 | dockcross, '-a', '--env-file={}'.format(dockcross_env)] 61 | 62 | self.installed = True 63 | -------------------------------------------------------------------------------- /builder/imports/gcc.py: -------------------------------------------------------------------------------- 1 
| # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | from builder.core.host import current_os 5 | from builder.actions.install import InstallPackages 6 | from builder.core.project import Import 7 | from builder.actions.script import Script 8 | from builder.core.toolchain import Toolchain 9 | from builder.core.util import UniqueList 10 | 11 | 12 | class GCC(Import): 13 | def __init__(self, **kwargs): 14 | super().__init__( 15 | compiler=True, 16 | config={ 17 | 'targets': ['linux'], 18 | }, 19 | **kwargs) 20 | self.installed = False 21 | 22 | def resolved(self): 23 | return True 24 | 25 | def install(self, env): 26 | if self.installed: 27 | return 28 | 29 | config = env.config 30 | 31 | # Ensure additional compiler packages are installed 32 | packages = UniqueList(config.get('compiler_packages', [])) 33 | packages = [p for p in packages if not p.startswith('gcc')] 34 | Script([InstallPackages(packages)], 35 | name='Install compiler prereqs').run(env) 36 | 37 | installed_path, installed_version = Toolchain.find_compiler( 38 | env.spec.compiler, env.spec.compiler_version) 39 | if installed_path: 40 | print('Compiler {} {} already exists at {}'.format( 41 | env.spec.compiler, installed_version, installed_path)) 42 | self.installed = True 43 | return 44 | 45 | # It's ok to attempt to install packages redundantly, they won't hurt anything 46 | packages = UniqueList(config.get('compiler_packages', [])) 47 | 48 | Script([InstallPackages(packages)], name='install gcc').run(env) 49 | 50 | self.installed = True 51 | -------------------------------------------------------------------------------- /builder/imports/golang.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
# Pre-built Go 1.21.5 distributions, keyed by '<target>-<arch>'.
# NOTE(review): 'openbsd-x64' points at a *linux* tarball — confirm intent.
URLs = {
    'linux-armv6': 'https://go.dev/dl/go1.21.5.linux-armv6l.tar.gz',
    'linux-armv7': 'https://go.dev/dl/go1.21.5.linux-armv6l.tar.gz',
    'linux-armv8': 'https://go.dev/dl/go1.21.5.linux-arm64.tar.gz',
    'linux-x86': 'https://go.dev/dl/go1.21.5.linux-386.tar.gz',
    'linux-x64': 'https://go.dev/dl/go1.21.5.linux-amd64.tar.gz',
    'openbsd-x64': 'https://go.dev/dl/go1.21.5.linux-amd64.tar.gz',
    'windows-x64': 'https://go.dev/dl/go1.21.5.windows-amd64.zip',
    'windows-x86': 'https://go.dev/dl/go1.21.5.windows-386.zip',
    'macos-x64': 'https://go.dev/dl/go1.21.5.darwin-amd64.tar.gz',
    'macos-armv8': 'https://go.dev/dl/go1.21.5.darwin-arm64.tar.gz',
}


class GOLANG(Import):
    """Import that ensures a Go toolchain (>= 1.18) is available, downloading
    a pre-built distribution when the host doesn't already have one."""

    def __init__(self, **kwargs):
        super().__init__(
            config={},
            **kwargs)
        # Install location (relative to the source root once installed).
        self.path = None
        self.installed = False

    def resolved(self):
        return True

    def install(self, env):
        """Check for a local Go >= 1.18, otherwise fetch a pre-built binary
        distribution for the target (or host, when cross-compiling)."""
        if self.installed:
            return

        sh = env.shell

        target = '{}-{}'.format(env.spec.target, env.spec.arch)

        cross_compile = util.deep_get(env, 'toolchain.cross_compile', False)

        # If this is a local build, check the local machine
        if not cross_compile or target not in URLs:
            # run `go version`
            result = util.run_command('go', 'version')
            if result.returncode == 0:
                # check the version, we need version >=1.18
                # NOTE(review): assumes output shaped like
                # 'go version go1.21.5 <os>/<arch>' — confirm against devel builds.
                version_str = result.output.split(" ")[2][2:]
                version_numbers = list(map(int, version_str.split('.')))
                compare_version_numbers = list(map(int, "1.18.0".split('.')))
                if version_numbers >= compare_version_numbers:
                    # NOTE(review): returns without setting self.installed,
                    # so a second install() call repeats this probe.
                    return

        if target not in URLs:
            raise EnvironmentError(
                'No pre-built binaries for {} are available, please install golang greater than 1.18'.format(target))

        install_dir = os.path.join(env.deps_dir, self.name.lower())
        # If path is going to be relative, it has to be relative to the source directory
        self.path = str(Path(install_dir).relative_to(env.root_dir))
        print('Installing pre-built golang binaries for {} to {}'.format(
            target, install_dir))

        sh.mkdir(install_dir)
        if cross_compile:
            # When cross-compiling, use the go executable for the *current*
            # platform (go is only used for codegen here).
            # NOTE(review): assumes current_platform() is a key of URLs.
            url = URLs[current_platform()]
        else:
            url = URLs[target]
        ext = '.tar.gz' if url.endswith('.tar.gz') else '.zip'
        filename = '{}/golang{}'.format(install_dir, ext)
        print('Downloading {}'.format(url))
        fetch_and_extract(url, filename, install_dir)
        os.remove(filename)

        # Set PATH
        if cross_compile:
            # Path to go binary, expressed under the container's /work mount
            env.variables['go_path'] = "/work/"+str(Path(os.path.join(install_dir, 'go/bin')
                                                         ).relative_to(env.root_dir))
        else:
            # export the PATH directly if not cross compile.
            # env.variables['go_path'] = '{}/go/bin'.format(install_dir)
            sh.setenv('PATH', '{}{}{}'.format('{}/go/bin'.format(install_dir), os.pathsep, sh.getenv('PATH')))

        self.installed = True

    def mirror(self, env):
        """Mirror every known Go distribution into the package mirror."""
        for src_url in URLs.values():
            mirror_package(self.name, src_url)
# Pre-built JDK 8 distributions, keyed by '<target>-<arch>'.
URLs = {
    'linux-armv6': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u232-b09/OpenJDK8U-jdk_arm_linux_hotspot_8u232b09.tar.gz',
    'linux-armv7': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u232-b09/OpenJDK8U-jdk_arm_linux_hotspot_8u232b09.tar.gz',
    'linux-armv8': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u242-b08/OpenJDK8U-jdk_aarch64_linux_hotspot_jdk8u242-b08.tar.gz',
    'linux-x64': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u242-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u242b08.tar.gz',
    'windows-x64': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u242-b08/OpenJDK8U-jdk_x64_windows_hotspot_8u242b08.zip',
    'windows-x86': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u242-b08/OpenJDK8U-jdk_x86-32_windows_hotspot_8u242b08.zip',
    'macos-x64': 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u242-b08/OpenJDK8U-jdk_x64_mac_hotspot_8u242b08.tar.gz',
    'macos-armv8': 'https://corretto.aws/downloads/resources/8.422.05.1/amazon-corretto-8.422.05.1-macosx-aarch64.tar.gz',
}


class JDK8(Import):
    """Import that finds an existing JDK (via javac / JAVA_HOME) or installs
    a pre-built JDK 8, exporting its home as env.variables['java_home']."""

    def __init__(self, **kwargs):
        super().__init__(
            config={},
            **kwargs)
        # JDK home directory once resolved.
        self.path = None
        self.installed = False

    def resolved(self):
        return True

    def install(self, env):
        """Locate a usable local JDK; failing that, download the pre-built
        distribution for the target platform."""
        if self.installed:
            return

        sh = env.shell

        target = '{}-{}'.format(env.spec.target, env.spec.arch)

        cross_compile = util.deep_get(env, 'toolchain.cross_compile', False)

        # If this is a local build, check the local machine
        if not cross_compile or target not in URLs:
            javac_path = util.where('javac')
            if javac_path:
                # Derive the JDK root from the javac binary location.
                javac_path = javac_path.replace('/bin/javac', '')
            prefixes = [javac_path, os.environ.get('JAVA_HOME', None)]
            # A candidate root must have jni.h AND a JVM library; each inner
            # list is a set of glob alternatives for one required file.
            required_files = [
                ['include/jni.h'],
                ['lib/**/libjvm.so', '**/lib/**/libjvm.so',
                 'lib/**/jvm.dll', '**/lib/**/jvm.dll'],
            ]
            found = 0

            for paths in required_files:
                path_found = False
                for path in paths:
                    for prefix in prefixes:
                        if not prefix:
                            continue
                        full_path = os.path.join(prefix, path)
                        if glob.glob(full_path, recursive=True):
                            found += 1
                            path_found = True
                            break
                    if path_found:
                        break

            if found >= len(required_files):
                # NOTE(review): `prefix` here is the leaked loop variable from
                # the search above — it is the prefix that matched the *last*
                # required file, which may differ from the first's; confirm.
                print('Found existing JDK8 at {}'.format(prefix))
                self.path = prefix
                env.variables['java_home'] = self.path
                self.installed = True
                return

        if target not in URLs:
            raise EnvironmentError(
                'No pre-built binaries for {} are available, please install JDK8 or greater and set JAVA_HOME'.format(target))

        install_dir = os.path.join(env.deps_dir, self.name.lower())
        # If path is going to be relative, it has to be relative to the source directory
        self.path = str(Path(install_dir).relative_to(env.root_dir))
        print('Installing pre-built JDK binaries for {} to {}'.format(
            target, install_dir))

        sh.mkdir(install_dir)
        url = URLs[target]
        ext = '.tar.gz' if url.endswith('.tar.gz') else '.zip'
        filename = '{}/jdk8{}'.format(install_dir, ext)
        print('Downloading {}'.format(url))
        fetch_and_extract(url, filename, install_dir)
        os.remove(filename)

        # The archive expands to a single '*jdk*' directory; take the first.
        jdk_home = glob.glob(os.path.join(install_dir, '*jdk*'))[0]
        assert jdk_home

        # OSX is special and has a Contents/Home folder inside the distro
        if env.spec.target == 'macos':
            jdk_home = os.path.join(jdk_home, 'Contents', 'Home')

        # Use absolute path for local, relative for cross-compile
        self.path = jdk_home
        if cross_compile:
            self.path = str(Path(os.path.join(install_dir, jdk_home)
                                 ).relative_to(env.root_dir))

        env.variables['java_home'] = self.path
        self.installed = True

    def mirror(self, env):
        """Mirror every known JDK distribution into the package mirror."""
        for src_url in URLs.values():
            mirror_package(self.name, src_url)
parser.add_argument('--libcrypto', default=None) 52 | args = parser.parse_known_args(env.args.args)[0] 53 | 54 | if args.libcrypto: 55 | print('Using custom libcrypto: {}'.format(args.libcrypto)) 56 | return _use_libcrypto(args.libcrypto) 57 | 58 | # AL2012 has a pre-built libcrypto, since its linker is from another world 59 | if current_host() == 'al2012': 60 | print('Using image libcrypto: /opt/openssl') 61 | return _use_libcrypto('/opt/openssl') 62 | 63 | print('Installing pre-built libcrypto binaries for {}-{} to {}'.format( 64 | env.spec.target, env.spec.arch, install_dir)) 65 | 66 | lib_version = '1.1.1' 67 | lib_os = env.spec.target 68 | if current_host() == 'manylinux' and env.spec.arch != 'armv8': 69 | lib_os = 'manylinux' 70 | lib_version = '1.0.2' 71 | url = self.url.format(version=lib_version, 72 | os=lib_os, arch=env.spec.arch) 73 | filename = '{}/libcrypto.tar.gz'.format(install_dir) 74 | print('Downloading {}'.format(url)) 75 | fetch_and_extract(url, filename, install_dir) 76 | print('Extracted {} to {}'.format(filename, install_dir)) 77 | 78 | self.installed = True 79 | return _use_libcrypto(install_dir) 80 | 81 | def cmake_args(self, env): 82 | assert self.installed 83 | return super().cmake_args(env) + [ 84 | "-DLibCrypto_INCLUDE_DIR={}/include".format(self.prefix), 85 | "-DLibCrypto_SHARED_LIBRARY={}/lib/libcrypto.so".format( 86 | self.prefix), 87 | "-DLibCrypto_STATIC_LIBRARY={}/lib/libcrypto.a".format( 88 | self.prefix), 89 | ] 90 | -------------------------------------------------------------------------------- /builder/imports/llvm.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
3 | 4 | 5 | from builder.core.host import current_os 6 | from builder.core.project import Import 7 | from builder.core.toolchain import Toolchain 8 | from builder.core.util import UniqueList 9 | from builder.actions.install import InstallPackages 10 | from builder.actions.script import Script 11 | 12 | import os 13 | import stat 14 | import tempfile 15 | 16 | # this is a copy of https://apt.llvm.org/llvm.sh modified to add support back in 17 | # for older versions of clang < 8, and removed the need for clangd, lldb 18 | 19 | LLVM_SH = """\ 20 | #!/bin/bash 21 | ################################################################################ 22 | # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 23 | # See https://llvm.org/LICENSE.txt for license information. 24 | # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 25 | ################################################################################ 26 | # 27 | # This script will install the llvm toolchain on the different 28 | # Debian and Ubuntu versions 29 | 30 | set -eux 31 | 32 | # read optional command line argument 33 | LLVM_VERSION=18 34 | if [ "$#" -eq 1 ]; then 35 | LLVM_VERSION=$1 36 | fi 37 | 38 | DISTRO=$(lsb_release -is) 39 | VERSION=$(lsb_release -sr) 40 | DIST_VERSION="${DISTRO}_${VERSION}" 41 | 42 | if [[ $EUID -ne 0 ]]; then 43 | echo "This script must be run as root!" 44 | exit 1 45 | fi 46 | 47 | declare -A LLVM_VERSION_PATTERNS 48 | LLVM_VERSION_PATTERNS[3]="-3.9" 49 | LLVM_VERSION_PATTERNS[6]="-6.0" 50 | LLVM_VERSION_PATTERNS[7]="-7" 51 | LLVM_VERSION_PATTERNS[8]="-8" 52 | LLVM_VERSION_PATTERNS[9]="-9" 53 | LLVM_VERSION_PATTERNS[10]="-10" 54 | LLVM_VERSION_PATTERNS[11]="-11" 55 | LLVM_VERSION_PATTERNS[12]="-12" 56 | LLVM_VERSION_PATTERNS[13]="-13" 57 | LLVM_VERSION_PATTERNS[14]="-14" 58 | LLVM_VERSION_PATTERNS[15]="-15" 59 | LLVM_VERSION_PATTERNS[16]="-16" 60 | LLVM_VERSION_PATTERNS[17]="-17" 61 | LLVM_VERSION_PATTERNS[18]="-18" 62 | 63 | if [ ! 
${LLVM_VERSION_PATTERNS[$LLVM_VERSION]+_} ]; then 64 | echo "This script does not support LLVM version $LLVM_VERSION" 65 | exit 3 66 | fi 67 | 68 | LLVM_VERSION_STRING=${LLVM_VERSION_PATTERNS[$LLVM_VERSION]} 69 | 70 | # find the right repository name for the distro and version 71 | case "$DIST_VERSION" in 72 | Debian_9* ) REPO_NAME="deb http://apt.llvm.org/stretch/ llvm-toolchain-stretch$LLVM_VERSION_STRING main" ;; 73 | Debian_10* ) REPO_NAME="deb http://apt.llvm.org/buster/ llvm-toolchain-buster$LLVM_VERSION_STRING main" ;; 74 | Debian_11* ) REPO_NAME="deb http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye$LLVM_VERSION_STRING main" ;; 75 | Debian_12* ) REPO_NAME="deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm$LLVM_VERSION_STRING main" ;; 76 | Debian_unstable ) REPO_NAME="deb http://apt.llvm.org/unstable/ llvm-toolchain$LLVM_VERSION_STRING main" ;; 77 | Debian_testing ) REPO_NAME="deb http://apt.llvm.org/unstable/ llvm-toolchain$LLVM_VERSION_STRING main" ;; 78 | Ubuntu_16.04 ) REPO_NAME="deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial$LLVM_VERSION_STRING main" ;; 79 | Ubuntu_18.04 ) REPO_NAME="deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic$LLVM_VERSION_STRING main" ;; 80 | Ubuntu_18.10 ) REPO_NAME="deb http://apt.llvm.org/cosmic/ llvm-toolchain-cosmic$LLVM_VERSION_STRING main" ;; 81 | Ubuntu_19.04 ) REPO_NAME="deb http://apt.llvm.org/disco/ llvm-toolchain-disco$LLVM_VERSION_STRING main" ;; 82 | Ubuntu_19.10 ) REPO_NAME="deb http://apt.llvm.org/eoan/ llvm-toolchain-eoan$LLVM_VERSION_STRING main" ;; 83 | Ubuntu_20.04 ) REPO_NAME="deb http://apt.llvm.org/focal/ llvm-toolchain-focal$LLVM_VERSION_STRING main" ;; 84 | Ubuntu_20.10 ) REPO_NAME="deb http://apt.llvm.org/groovy/ llvm-toolchain-groovy$LLVM_VERSION_STRING main" ;; 85 | Ubuntu_21.04 ) REPO_NAME="deb http://apt.llvm.org/hirsute/ llvm-toolchain-hirsute$LLVM_VERSION_STRING main" ;; 86 | Ubuntu_22.04 ) REPO_NAME="deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy$LLVM_VERSION_STRING 
class LLVM(Import):
    """Compiler import that installs clang/LLVM on linux by running the
    embedded LLVM_SH script (a modified copy of apt.llvm.org's llvm.sh)."""

    def __init__(self, **kwargs):
        super().__init__(
            compiler=True,
            config={
                'targets': ['linux'],
            },
            **kwargs)
        self.installed = False

    def resolved(self):
        # Nothing to download up front; installation happens via the script.
        return True

    def install(self, env):
        """Install the requested clang version unless one already exists.

        Installs prerequisite packages, probes for an existing matching
        compiler, and otherwise writes LLVM_SH to a temp file and runs it
        (with sudo on linux) passing the major LLVM version.
        """
        if self.installed:
            return

        sh = env.shell
        config = env.config

        # Ensure compiler packages are installed
        packages = UniqueList(config.get('compiler_packages', []))
        Script([InstallPackages(packages)],
               name='Install compiler prereqs').run(env)

        installed_path, installed_version = Toolchain.find_compiler(
            env.spec.compiler, env.spec.compiler_version)
        if installed_path:
            print('Compiler {} {} already exists at {}'.format(
                env.spec.compiler, installed_version, installed_path))
            self.installed = True
            return

        sudo = env.config.get('sudo', current_os() == 'linux')
        sudo = ['sudo'] if sudo else []

        # Strip minor/patch info: LLVM_SH's version table is keyed by major
        # version only. BUGFIX: the previous code called
        # str.replace(r'\..+', ''), which treats the regex as a literal and
        # never matches, so a dotted version leaked through and the script
        # rejected it ("does not support LLVM version ...").
        version = env.toolchain.compiler_version.split('.')[0]

        # Write the embedded install script to a temp file we control.
        script = tempfile.NamedTemporaryFile(delete=False)
        script_path = script.name
        script.write(LLVM_SH.encode())
        script.close()

        # Make script executable
        os.chmod(script_path, stat.S_IRUSR | stat.S_IRGRP |
                 stat.S_IROTH | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        sh.exec(*sudo, [script_path, version], check=True)

        self.installed = True
3 | 4 | import glob 5 | import os 6 | from pathlib import Path 7 | from urllib.parse import urlparse 8 | import zipfile 9 | 10 | from builder.core.fetch import fetch_and_extract, mirror_package 11 | from builder.core.project import Import 12 | from builder.core.util import chmod_exec 13 | 14 | 15 | ANDROID_NDK_VERSION = '16b' 16 | 17 | 18 | class NDK(Import): 19 | 20 | def __init__(self, **kwargs): 21 | super().__init__( 22 | name='ndk-r{}'.format(ANDROID_NDK_VERSION), 23 | config={ 24 | 'targets': ['linux'], 25 | 'build_steps': [], 26 | 'test_steps': [], 27 | }, 28 | url='https://dl.google.com/android/repository/android-ndk-r{}-linux-x86_64.zip'.format( 29 | ANDROID_NDK_VERSION), 30 | **kwargs) 31 | self.prefix = '' 32 | self.installed = False 33 | 34 | def resolved(self): 35 | return True 36 | 37 | def install(self, env): 38 | if self.installed: 39 | return 40 | 41 | sh = env.shell 42 | 43 | install_dir = os.path.join( 44 | env.deps_dir, 'android-ndk-r{}'.format(ANDROID_NDK_VERSION)) 45 | # If path to NDK is going to be relative, it has to be relative to the 46 | # source directory 47 | self.prefix = str(Path(install_dir).relative_to(env.root_dir)) 48 | # Export ndk_path 49 | env.variables['ndk_path'] = os.path.join('/work', self.prefix) 50 | print('Installing NDK r{} to {}'.format( 51 | ANDROID_NDK_VERSION, install_dir)) 52 | 53 | sh.mkdir(install_dir) 54 | filename = '{}/ndk-r{}.zip'.format(install_dir, ANDROID_NDK_VERSION) 55 | # Extract to deps dir, because zip file contains android-ndk-r{version} directory 56 | fetch_and_extract(self.url, filename, env.deps_dir) 57 | binaries = glob.glob( 58 | os.path.join(self.prefix, 'toolchains/llvm/prebuilt/linux-x86_64/bin/*')) 59 | for binary in binaries: 60 | chmod_exec(binary) 61 | 62 | self.installed = True 63 | 64 | def mirror(self, env): 65 | mirror_package(self.name, self.url) 66 | -------------------------------------------------------------------------------- /builder/imports/nodejs.py: 
# Wrapper script: nvm is a shell function, so it must be sourced before use.
NVM = r"""\
#!/usr/bin/env bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
nvm $*
"""

DEFAULT_VERSION = '12'


class NodeJS(Import):
    """Import that installs a specific Node.js version, via nvm (linux/mac),
    nvm-windows through chocolatey (windows), or unofficial builds (x86)."""

    def __init__(self, **kwargs):
        super().__init__(
            compiler=True,
            config={
                'targets': ['linux'],
            },
            **kwargs)
        self.url = 'https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh'
        self.version = kwargs.get('version', DEFAULT_VERSION)

        # Command used to invoke nvm; replaced by a wrapper script on *nix.
        self.nvm = 'nvm'
        self.installed = False

    def install(self, env):
        """Install node, choosing the mechanism appropriate for the host."""
        if self.installed or (util.where('node') and current_os() == 'windows'):
            return

        sh = env.shell

        self.install_dir = os.path.join(env.deps_dir, self.name)
        sh.mkdir(self.install_dir)

        if current_arch() == "x86":
            # Official builds dropped x86; use unofficial builds instead.
            self.install_node_via_unofficial_build(env)
        else:
            if current_os() == 'windows':
                self.install_nvm_choco(env)
            else:
                self.install_nvm_sh(env)

            self.install_node_via_nvm(env)

        self.installed = True

    def install_node_via_nvm(self, env):
        """Install self.version through nvm and put it on PATH."""
        sh = env.shell
        # Install node
        sh.exec(self.nvm, 'install', self.version, check=True)

        # Fetch path to installed node, add to PATH
        if current_os() != 'windows':
            result = sh.exec(self.nvm, 'which', self.version, check=True)
            node_path = os.path.dirname(result.output)
            sh.setenv('PATH', '{}{}{}'.format(
                node_path, os.pathsep, sh.getenv('PATH')))
        else:
            # BUGFIX: previously hardcoded 'nvm use 10.16' even though the
            # line above installed self.version; activate what was installed.
            sh.exec('nvm', 'use', self.version, check=True)
            sh.exec('refreshenv', check=True)

        sh.exec('node', '--version', check=True)

    def install_nvm_choco(self, env):
        """Install nvm-windows via chocolatey and import its env vars."""
        sh = env.shell
        Script([InstallPackages(['nvm'],)]).run(env)
        # A fresh cmd is needed to pick up the NVM_* variables; dump them via
        # a batch script and replay them into our shell.
        env_script = r'{}\dump_env.bat'.format(self.install_dir)
        with open(env_script, 'w+') as script:
            script.writelines(
                [
                    'call refreshenv.cmd\n',
                    'set\n'
                ]
            )
            script.flush()
        result = sh.exec(env_script, check=True, quiet=True)
        lines = result.output.split('\n')
        vars = {}
        for line in lines:
            if '=' in line:
                key, value = line.split('=', 1)
                vars[key.upper()] = value
        # Update path and NVM_* env vars
        sh.setenv('PATH', vars['PATH'])
        for key, value in vars.items():
            if key.startswith('NVM_'):
                sh.setenv(key, value)
        sh.exec('nvm', 'version', check=True)

    def install_nvm_sh(self, env):
        """Download nvm's install script, run it, and write a wrapper that
        sources nvm for subsequent invocations."""
        sh = env.shell
        print('Installing nvm and node {} via nvm'.format(self.version))

        # Download nvm
        filename = '{}/install-nvm.sh'.format(self.install_dir)
        print('Downloading {} to {}'.format(self.url, filename))
        fetch_script(self.url, filename)
        sh.exec(filename, check=True)

        # Install wrapper to run NVM
        run_nvm = '{}/run-nvm.sh'.format(self.install_dir)
        with open(run_nvm, 'w+') as nvm_sh:
            nvm_sh.write(NVM)
        os.chmod(run_nvm, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        self.nvm = run_nvm

    def install_node_via_unofficial_build(self, env):
        """Install node from unofficial-builds.nodejs.org (x86 hosts)."""
        sh = env.shell
        # BUGFIX: the original format string had no placeholder, silently
        # dropping the version from the message.
        print('Installing node {} build directly'.format(self.version))

        # Normalize version format; note 12.16.3 is the last version with x86 support
        def normalize_version(v):
            append_times = 0
            while re.match(r'^([0-9]+\.){2}[0-9]+$', v) is None:
                # Only try append sub version twice
                if append_times < 2:
                    v += ".0"
                    append_times += 1
                else:  # DEFAULT TO 12.0.0
                    return (DEFAULT_VERSION + ".0.0")
            return v

        version = normalize_version(self.version)
        url = "https://unofficial-builds.nodejs.org/download/release/v{}/node-v{}-{}-{}.tar.gz".format(
            version, version, current_os(), current_arch())
        package_name = "node-v{}-{}-{}".format(version, current_os(), current_arch())

        # Fetch the node build
        extra_path = '{}/node_install'.format(self.install_dir)
        package_path = '{}/node_package'.format(self.install_dir)
        fetch_and_extract(url, package_path, extra_path)

        # Set PATH
        node_path = '{}/{}/bin'.format(extra_path, package_name)
        sh.setenv('PATH', '{}{}{}'.format(node_path, os.pathsep, sh.getenv('PATH')))


class Node12(NodeJS):
    def __init__(self, **kwargs):
        super().__init__(version='12', **kwargs)


class Node14(NodeJS):
    def __init__(self, **kwargs):
        super().__init__(version='14', **kwargs)


class Node16(NodeJS):
    def __init__(self, **kwargs):
        super().__init__(version='16', **kwargs)


class Node18(NodeJS):
    def __init__(self, **kwargs):
        super().__init__(version='18', **kwargs)
pointer) in function 12 | # signatures between the declaration and implementation, leading to mismatched bound warnings that 13 | # turn into errors. While s2n should fix these as they come in, their existence shouldn't break our builds. 14 | 'cmake_args': ['-DUNSAFE_TREAT_WARNINGS_AS_ERRORS=OFF'] 15 | } 16 | 17 | 18 | class S2NImport(Import): 19 | def __init__(self, **kwargs): 20 | super().__init__( 21 | library=True, 22 | imports=['aws-lc'], 23 | config=config, 24 | url='https://github.com/aws/s2n-tls.git', 25 | **kwargs) 26 | 27 | 28 | class S2NProject(Project): 29 | def __init__(self, **kwargs): 30 | super().__init__( 31 | account='awslabs', 32 | imports=['aws-lc'], 33 | url='https://github.com/aws/s2n-tls.git', 34 | **config, 35 | **kwargs) 36 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-crt-builder/1c53c869ba916edd216e261eaf8e45be95967ec2/requirements.txt -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [pycodestyle] 2 | max_line_length = 120 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
3 | 4 | from setuptools import setup, find_packages 5 | from subprocess import check_output 6 | 7 | git_branch = check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], text=True).strip() 8 | version = 'v0.9+{}'.format(git_branch) 9 | if git_branch in ['master', 'main']: 10 | git_rev = check_output(['git', 'describe', '--abbrev=0', '--tags'], text=True).strip() 11 | version = git_rev 12 | 13 | setup( 14 | name="builder", 15 | version=version, 16 | packages=find_packages(), 17 | entry_points={ 18 | 'console_scripts': [ 19 | 'builder = builder.main:main' 20 | ] 21 | }, 22 | author='AWS SDK Common Runtime Team', 23 | author_email='aws-sdk-common-runtime@amazon.com', 24 | project_urls={ 25 | "Source": "https://github.com/awslabs/aws-crt-builder" 26 | } 27 | ) 28 | -------------------------------------------------------------------------------- /tests/.builder/scripts/test_action.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 3 | 4 | import Builder 5 | 6 | 7 | class Test(Builder.Action): 8 | def run(self, env): 9 | def _doit(env): 10 | sh = env.shell 11 | sh.exec('true', retries=3) 12 | return Builder.Script([_doit]) 13 | -------------------------------------------------------------------------------- /tests/.builder/verify_imports.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0. 
import Builder

# By just loading this script, it should interrogate the environment and make sure that every class we expect exists

CLASSES = (
    Builder.Shell,
    Builder.Env,
    Builder.Action,
    Builder.Host,
    Builder.Project,
    Builder.Toolchain,
    Builder.CMakeBuild,
    Builder.CTestRun,
    Builder.DownloadDependencies,
    Builder.DownloadSource,
    Builder.InstallTools,
    Builder.Script,
    Builder.SetupCrossCICrtEnvironment,
    Builder.SetupEventStreamEchoServer,
)

print('Found API classes available:')
for cls in CLASSES:
    assert cls and cls.__name__
    print(cls.__name__)

# ----------------------------------------------------------------------------
# tests/builder.json:
# {
#     "name": "tests",
#     "build_steps": [
#         "git --version"
#     ],
#     "test_steps": [],
#     "needs_compiler": false
# }
# ----------------------------------------------------------------------------
# tests/data/lib-1/builder.json:
# {
#     "name": "lib-1",
#     "variables": {
#         "gradlew": "{source_dir}/gradlew"
#     },
#     "build_steps": [
#         "echo \"build lib-1\"",
#         "{source_dir}/gradlew"
#     ],
#     "post_build_steps": [
#         "echo \"post build lib-1\"",
#         "{gradlew} postBuildTask"
#     ],
#     "test_steps": [
#         "echo \"test lib-1\""
#     ]
# }
# ----------------------------------------------------------------------------
# tests/test_env.py:

import builder.core.api  # force API to load and expose the virtual module
from collections import namedtuple
import os
import unittest
import unittest.mock as mock

from builder.core.env import Env
from builder.core.project import Project
from builder.core.spec import BuildSpec
from builder.core.host import current_os

# base config -- copy for tests
here = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(here, 'data')
_test_proj_config = {
    'name': 'test-proj',
    'search_dirs': [test_data_dir],
    'path': here,
}


class TestEnv(unittest.TestCase):
    """Tests for Env config resolution."""

    def setUp(self):
        # remove possible inter test behavior
        Project._projects.clear()

    def test_project_variants(self):
        """project variants should produce a config overridden by variant contents"""
        config = _test_proj_config.copy()
        config['variants'] = {
            'test': {
                'targets': {
                    current_os(): {
                        'upstream': [
                            {'name': 'TEST'}
                        ]
                    }
                }
            }
        }

        p = Project(**config)
        spec = BuildSpec()
        env = Env({
            'project': p,
            'args': namedtuple('Args', ['project', 'cli_config'])(None, None),
            'branch': 'main',
            'spec': spec,
            'variant': 'test',
        })
        # env.config should be the variant, not the defaults.
        # BUG FIX: the original passed a generator expression to assertTrue, which is
        # always truthy regardless of its contents, so the assertion could never fail.
        variant = env.config
        self.assertTrue(any(u['name'] == 'TEST' for u in variant['upstream']))

# ----------------------------------------------------------------------------
# tests/test_project.py:
import os
import unittest
import unittest.mock as mock

from builder.core.project import Project
from builder.core.spec import BuildSpec
from builder.actions.script import Script

import builder.core.api  # force API to load and expose the virtual module
here = os.path.dirname(os.path.abspath(__file__))

test_data_dir = os.path.join(here, 'data')

# base config -- copy for tests
_test_proj_config = {
    'name': 'test-proj',
    'search_dirs': [test_data_dir],
    'path': here,
    'run_tests': True,
}
| def _collect_steps(step): 24 | """ 25 | collect the list of steps 26 | """ 27 | 28 | def _collect_steps_impl(out, curr): 29 | if isinstance(curr, list): 30 | for s in curr: 31 | _collect_steps_impl(out, s) 32 | elif isinstance(curr, Script): 33 | out.append(str(curr)) 34 | _collect_steps_impl(out, curr.commands) 35 | else: 36 | out.append(str(curr)) 37 | 38 | stack = [] 39 | _collect_steps_impl(stack, step) 40 | return stack 41 | 42 | 43 | def _fuzzy_find_step(step_stack, step, name): 44 | """ 45 | attempt to find a step name or value that either matches name or contains name as a fragment 46 | :return: tuple(step, stack idx) | None 47 | """ 48 | for i in range(len(step_stack)): 49 | s = step_stack[i] 50 | if s == name or name in s: 51 | return s, i 52 | return None 53 | 54 | 55 | def _step_exists(step, name): 56 | """ 57 | test if the step [name] exists in the set of [step]s 58 | """ 59 | step_stack = _collect_steps(step) 60 | return _fuzzy_find_step(step_stack, step, name) is not None 61 | 62 | 63 | def _dump_step(step): 64 | import pprint 65 | steps = _collect_steps(step) 66 | pprint.pprint(steps, width=240) 67 | 68 | 69 | class TestProject(unittest.TestCase): 70 | 71 | def setUp(self): 72 | # remove possible inter test behavior 73 | Project._projects.clear() 74 | 75 | def _format_step(self, step): 76 | step_stack = _collect_steps(step) 77 | return "\n".join(step_stack) 78 | 79 | def _assert_step_contains(self, step, name): 80 | if not _step_exists(step, name): 81 | steps = self._format_step(step) 82 | self.fail(f"{name} not contained in stack:\n{steps}") 83 | 84 | def _assert_step_not_contains(self, step, name): 85 | if _step_exists(step, name): 86 | steps = self._format_step(step) 87 | self.fail(f"unexpected step {name} found in stack:\n{steps}") 88 | 89 | def _assert_step_contains_all(self, step, names, ordered=True): 90 | for name in names: 91 | self._assert_step_contains(step, name) 92 | 93 | if ordered: 94 | stack = _collect_steps(step) 95 | steps = 
[_fuzzy_find_step(stack, step, name) for name in names] 96 | step_indices = [t[1] for t in steps] 97 | steps_in_order = all(step_indices[i] <= step_indices[i+1] for i in range(len(step_indices) - 1)) 98 | formatted_steps = self._format_step(step) 99 | self.assertTrue( 100 | steps_in_order, f"steps exist but not in order expected:\nexpected:{names}\nfound:\n{formatted_steps}") 101 | 102 | def test_build_defaults(self): 103 | """cmake build step should be default when not specified and toolchain exists""" 104 | p = Project(**_test_proj_config.copy()) 105 | mock_env = mock.Mock(name='MockEnv') 106 | steps = p.build(mock_env) 107 | self._assert_step_contains(steps, 'cmake build') 108 | 109 | def test_override_build_steps(self): 110 | """explict build steps take precedence""" 111 | config = _test_proj_config.copy() 112 | config['build_steps'] = ['foo'] 113 | p = Project(**config) 114 | mock_env = mock.Mock(name='MockEnv') 115 | steps = p.build(mock_env) 116 | self._assert_step_contains(steps, 'foo') 117 | 118 | def test_upstream_builds_first(self): 119 | """upstream dependencies should be built first""" 120 | config = _test_proj_config.copy() 121 | config['upstream'] = [ 122 | {'name': 'lib-1'} 123 | ] 124 | 125 | p = Project(**config) 126 | mock_env = mock.Mock(name='MockEnv', config=config) 127 | mock_env.spec = BuildSpec() 128 | steps = p.pre_build(mock_env) 129 | self._assert_step_contains_all(steps, ['build dependencies', 'build lib-1']) 130 | 131 | def test_default_test_step(self): 132 | """downstream tests should build by default""" 133 | config = _test_proj_config.copy() 134 | p = Project(**config) 135 | m_toolchain = mock.Mock(name='mock toolchain', cross_compile=False) 136 | mock_env = mock.Mock(name='MockEnv', config=config, 137 | toolchain=m_toolchain) 138 | steps = p.test(mock_env) 139 | self._assert_step_contains(steps, 'test') 140 | 141 | def test_downstream_tests_build_by_default(self): 142 | """downstream tests should build by default""" 143 | 144 | 
config = _test_proj_config.copy() 145 | config['downstream'] = [ 146 | { 147 | 'name': 'lib-1' 148 | } 149 | ] 150 | 151 | p = Project(**config) 152 | m_toolchain = mock.Mock(name='mock toolchain', cross_compile=False) 153 | mock_env = mock.Mock(name='MockEnv', config=config, project=p, toolchain=m_toolchain) 154 | mock_env.spec = BuildSpec() 155 | steps = p.build_consumers(mock_env) 156 | self._assert_step_contains_all(steps, ['test lib-1']) 157 | 158 | def test_downstream_post_build_runs_before_tests(self): 159 | """downstream post_build_steps should run before tests""" 160 | config = _test_proj_config.copy() 161 | config['downstream'] = [ 162 | { 163 | 'name': 'lib-1' 164 | } 165 | ] 166 | 167 | p = Project(**config) 168 | m_toolchain = mock.Mock(name='mock toolchain', cross_compile=False) 169 | mock_env = mock.Mock(name='MockEnv', config=config, project=p, toolchain=m_toolchain) 170 | mock_env.spec = BuildSpec() 171 | steps = p.build_consumers(mock_env) 172 | self._assert_step_contains_all(steps, ['post build lib-1', 'test lib-1']) 173 | 174 | def test_explicit_upstream_branch(self): 175 | """upstream with specific revision should override the detected branch""" 176 | config = _test_proj_config.copy() 177 | config['upstream'] = [ 178 | { 179 | 'name': 'lib-1', 180 | 'revision': 'explicit-branch' 181 | } 182 | ] 183 | 184 | p = Project(**config) 185 | spec = BuildSpec(target='linux') 186 | deps = p.get_dependencies(spec) 187 | self.assertEqual('explicit-branch', deps[0].revision) 188 | 189 | def test_upstream_targets_filtered_for_spec(self): 190 | """upstream with specific targets should only be applied if target matches current spec""" 191 | config = _test_proj_config.copy() 192 | config['upstream'] = [ 193 | { 194 | 'name': 'lib-1', 195 | 'targets': ['linux'] 196 | } 197 | ] 198 | 199 | p = Project(**config) 200 | spec = BuildSpec(target='macos') 201 | dependencies = p.get_dependencies(spec) 202 | self.assertEqual(0, len(dependencies), "dependencies should 
have filtered upstream with specific target") 203 | 204 | def test_project_source_dir_replaced(self): 205 | """project specific dependency variables should be replaced""" 206 | config = _test_proj_config.copy() 207 | config['upstream'] = [ 208 | { 209 | 'name': 'lib-1' 210 | } 211 | ] 212 | 213 | p = Project(**config) 214 | spec = BuildSpec(target='macos') 215 | dependencies = p.get_dependencies(spec) 216 | m_env = mock.Mock(name='MockEnv', config=config) 217 | steps = dependencies[0].post_build(m_env) 218 | self._assert_step_contains(steps, "{}/gradlew postBuildTask".format(os.path.join(test_data_dir, "lib-1"))) 219 | -------------------------------------------------------------------------------- /tests/test_toolchain.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | from builder.core.toolchain import Toolchain 5 | 6 | 7 | class ToolchainTest(unittest.TestCase): 8 | 9 | def test_all_compilers(self): 10 | toolchain = Toolchain() 11 | all_compilers = toolchain.all_compilers() 12 | self.assertGreaterEqual(len(all_compilers), 1) 13 | 14 | def test_default_compiler(self): 15 | toolchain = Toolchain() 16 | all_compilers = toolchain.all_compilers() 17 | default_compiler = toolchain.default_compiler() 18 | self.assertIn(default_compiler, all_compilers) 19 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import builder.core.util as utils 3 | 4 | 5 | class TestUtils(unittest.TestCase): 6 | 7 | def test_deep_get_dict(self): 8 | """test deep_get for dictionary types""" 9 | d = { 10 | 'foo': { 11 | 'bar': { 12 | 'baz': 'quux' 13 | } 14 | } 15 | } 16 | 17 | self.assertEqual('quux', utils.deep_get(d, 'foo.bar.baz')) 18 | self.assertEqual(None, utils.deep_get(d, 'foo.bar.qux')) 19 | 20 | def test_deep_get_attr(self): 21 | """test deep_get for object 
attributes""" 22 | 23 | class Foo(): 24 | pass 25 | 26 | obj = Foo() 27 | obj.foo = Foo() 28 | obj.foo.bar = Foo() 29 | obj.foo.bar.baz = 'quux' 30 | 31 | self.assertEqual('quux', utils.deep_get(obj, 'foo.bar.baz')) 32 | self.assertEqual(None, utils.deep_get(obj, 'foo.bar.qux')) 33 | 34 | def test_replace_variables(self): 35 | variables = {'x': 'foo', 'y': 'baz'} 36 | 37 | # string 38 | self.assertEqual("foo", utils.replace_variables("{x}", variables)) 39 | self.assertEqual("foo.bar.baz", utils.replace_variables( 40 | "{x}.bar.{y}", variables)) 41 | 42 | # lists 43 | self.assertEqual(["foo", "qux", "baz"], utils.replace_variables( 44 | ["{x}", "qux", "{y}"], variables)) 45 | 46 | # dict 47 | value = {"f": "{x}", "x": "qux", "b": "{y}"} 48 | expected = {"f": "foo", "x": "qux", "b": "baz"} 49 | self.assertEqual(expected, utils.replace_variables(value, variables)) 50 | 51 | def test_list_unique(self): 52 | expected = [1, 2, 3] 53 | self.assertEqual(expected, utils.list_unique([1, 1, 2, 1, 3, 2, 1, 3])) 54 | 55 | def test_tree_transform(self): 56 | tree = { 57 | 'foo': { 58 | 'bar': { 59 | 'baz': 2 60 | } 61 | }, 62 | 'baz': 2 63 | } 64 | 65 | def fn(x): return x * 2 66 | utils.tree_transform(tree, 'qux', fn) 67 | self.assertEqual(tree, tree) 68 | 69 | utils.tree_transform(tree, 'baz', fn) 70 | self.assertEqual(tree['baz'], 4) 71 | self.assertEqual(tree['foo']['bar']['baz'], 4) 72 | -------------------------------------------------------------------------------- /tests/test_vmod.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | from builder.core.vmod import VirtualModule 5 | 6 | 7 | class MockVirtualModule(VirtualModule): 8 | CONSTANT = 42 9 | 10 | class A: 11 | pass 12 | 13 | class B: 14 | pass 15 | 16 | def function(x): 17 | return x 18 | 19 | 20 | class TestVirtualModule(unittest.TestCase): 21 | 22 | def test_constant(self): 23 | from MockVirtualModule import CONSTANT 24 | self.assertEqual(CONSTANT, 
42) 25 | 26 | def test_classes(self): 27 | from MockVirtualModule import A, B 28 | a = A() 29 | b = B() 30 | self.assertNotEqual(a.__class__, b.__class__) 31 | 32 | def test_function(self): 33 | from MockVirtualModule import CONSTANT, function 34 | self.assertEqual(42, function(CONSTANT)) 35 | --------------------------------------------------------------------------------