├── .github
│   ├── dependabot.yml
│   ├── scripts
│   │   ├── build.ts
│   │   ├── index.ts
│   │   └── trawl.ts
│   └── workflows
│       ├── cd.installer.yml
│       ├── cd.www.yml
│       ├── cd.yml
│       └── ci.yml
├── .gitignore
├── .vscode
│   └── settings.json
├── LICENSE.txt
├── README.md
├── installer.sh
├── mash
└── tea.yaml

/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: github-actions
4 |     directory: /
5 |     schedule:
6 |       interval: weekly
7 | 
--------------------------------------------------------------------------------
/.github/scripts/build.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S pkgx deno^1 run --allow-read --allow-write
2 | 
3 | import { basename } from "https://deno.land/std@0.206.0/path/mod.ts";
4 | import * as flags from "https://deno.land/std@0.206.0/flags/mod.ts";
5 | import { Path } from "https://deno.land/x/libpkgx@v0.16.0/mod.ts";
6 | import { Script } from "./index.ts";
7 | 
8 | const args = flags.parse(Deno.args);
9 | const indir = (s => Path.abs(s) ?? Path.cwd().join(s))(args['input'])
10 | const outdir = (s => Path.abs(s) ?? Path.cwd().join(s))(args['output'])
11 | const index_json_path = args['index-json']
12 | 
13 | if (!indir || !outdir || !index_json_path) {
14 |   console.error(`usage: build.ts --input <dir> --output <dir> --index-json <file>`);
15 |   Deno.exit(64);
16 | }
17 | 
18 | const scripts = JSON.parse(Deno.readTextFileSync(index_json_path)).scripts as Script[]
19 | 
20 | const users: Record<string, Script[]> = {}
21 | 
22 | for (const script of scripts) {
23 |   const user = script.fullname.split('/')[0]
24 |   users[user] ??= []
25 |   users[user].push(script)
26 | }
27 | 
28 | // sort each entry in categories and users by the script birthtime
29 | for (const scripts of Object.values(users)) {
30 |   scripts.sort((a, b) => new Date(b.birthtime).getTime() - new Date(a.birthtime).getTime());
31 | }
32 | 
33 | for (const user in users) {
34 |   const d = outdir.join('u', user)
35 |   const scripts = users[user].filter(({description}) => description)
36 |   d.mkdir('p').join('index.json').write({ json: { scripts }, force: true, space: 2 })
37 | }
38 | 
39 | for (const script of scripts) {
40 |   console.error(script)
41 |   const [user, name] = script.fullname.split('/')
42 |   const gh_slug = new URL(script.url).pathname.split('/').slice(1, 3).join('/')
43 |   const infile = indir.join(gh_slug, 'scripts', basename(script.url))
44 | 
45 |   infile.cp({ to: outdir.join('u', user).mkdir('p').join(name) })
46 | 
47 |   if (!outdir.join(name).exists() && user != "pkgxdev") { // not already snagged
48 |     infile.cp({ to: outdir.join(name) })
49 |   }
50 | }
51 | 
52 | outdir.join('u/index.json').write({ json: { users }, force: true, space: 2})
53 | 
--------------------------------------------------------------------------------
/.github/scripts/index.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S pkgx deno^1 run --allow-run=bash --allow-read=.
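// Walks the fork clones produced by trawl.ts, mines each repo's git history,
// metadata.json, and README for per-script metadata, and prints the combined
// JSON index (consumed by build.ts and the website) to stdout.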
2 | 
3 | import { join, basename, dirname } from "https://deno.land/std@0.206.0/path/mod.ts";
4 | import { walk, exists } from "https://deno.land/std@0.206.0/fs/mod.ts";
5 | import * as flags from "https://deno.land/std@0.206.0/flags/mod.ts";
6 | 
7 | if (import.meta.main) {
8 |   const args = flags.parse(Deno.args);
9 |   const inputdir = args['input']
10 | 
11 |   if (!inputdir) {
12 |     console.error(`usage: index.ts --input <dir>`);
13 |     Deno.exit(1);
14 |   }
15 | 
16 |   Deno.chdir(inputdir);
17 | 
18 |   const scripts: Script[] = []
19 |   for await (const slug of iterateGitRepos('.')) {
20 |     console.error(`iterating: ${slug}`);
21 |     for (const script of await get_metadata(slug)) {
22 |       switch (basename(script.fullname)) {
23 |       case "cache":
24 |       case "demo-test-pattern":
25 |       case "ensure":
26 |       case "inventory":
27 |       case "pantry-inventory":
28 |       case "ls":
29 |       case "magic":
30 |       case "prune":
31 |       case "run":
32 |       case "stub":
33 |       case "upgrade":
34 |       case "cache+prune":
35 |       case "cache+ls":
36 |       case "cache+upgrade":
37 |       case "demo":
38 |         if (!script.fullname.startsWith("mxcl/")) {
39 |           // ignore stuff that was forked from when the root repo had scripts
40 |           // these scripts are now on @mxcl/…
41 |           continue;
42 |         }
43 |         // fallthrough
44 |       default:
45 |         scripts.push(script)
46 |       }
47 |     }
48 |   }
49 | 
50 |   scripts.sort((a, b) => b.birthtime.getTime() - a.birthtime.getTime());
51 | 
52 |   console.log(JSON.stringify({ scripts }, null, 2));
53 | }
54 | 
55 | ////////////////////////////////////////////////////////////////////// lib
56 | async function extractMarkdownSection(filePath: string, sectionTitle: string): Promise<string | undefined> {
57 |   const data = await Deno.readTextFile(filePath);
58 |   const lines = data.split('\n');
59 |   let capturing = false;
60 |   let sectionContent = '';
61 | 
62 |   for (let line of lines) {
63 |     line = line.trim();
64 |     if (/^##\s+/.test(line)) {
65 |       if (capturing) {
66 |         break; // stop if we reach another header section
67 |       } else if (normalize_title(line.replace(/^#+/, '')) == normalize_title(sectionTitle)) {
68 |         capturing = true;
69 |       } else if (line.replace(/^#+/, '').trim() == mash_title(sectionTitle)) {
70 |         capturing = true;
71 |       } else if (line.replace(/^#+/, '').trim() == `\`mash ${sectionTitle}\``) {
72 |         capturing = true;
73 |       }
74 |     } else if (capturing) {
75 |       sectionContent += line + '\n';
76 |     }
77 |   }
78 | 
79 |   return chuzzle(sectionContent);
80 | 
81 |   function normalize_title(input: string) {
82 |     return input.toLowerCase().replace(/[^a-z0-9]/g, '').trim();
83 |   }
84 | 
85 |   function mash_title(input: string) {
86 |     const [category, ...name] = input.trim().split('-')
87 |     return `\`mash ${category} ${name.join('-')}\``
88 |   }
89 | }
90 | 
91 | export interface Script {
92 |   fullname: string // the fully qualified name eg. user/category-script-name
93 |   birthtime: Date
94 |   description?: string
95 |   avatar: string
96 |   url: string
97 |   README?: string
98 |   cmd: string
99 | }
100 | 
101 | async function* iterateGitRepos(basePath: string): AsyncIterableIterator<string> {
102 |   for await (const entry of walk(basePath, { maxDepth: 2 })) {
103 |     if (entry.isDirectory && await exists(join(entry.path, '.git'))) {
104 |       yield entry.path;
105 |     }
106 |   }
107 | }
108 | 
109 | function chuzzle(ln: string): string | undefined {
110 |   const out = ln.trim()
111 |   return out || undefined;
112 | }
113 | 
114 | async function get_metadata(slug: string) {
115 | 
116 |   const cmdString = `git -C '${slug}' log --pretty=format:'%H %aI' --name-only --diff-filter=AR -- scripts`;
117 | 
118 |   const process = Deno.run({
119 |     cmd: ["bash", "-c", cmdString],
120 |     stdout: "piped"
121 |   });
122 | 
123 |   const output = new TextDecoder().decode(await process.output());
124 |   await process.status();
125 |   process.close();
126 | 
127 |   const lines = chuzzle(output)?.split('\n') ?? [];
128 |   const rv: Script[] = []
129 |   let currentCommitDate: string | undefined;
130 | 
131 |   for (let line of lines) {
132 |     line = line.trim()
133 | 
134 |     if (line.includes(' ')) { // Detect lines with commit hash and date
135 |       currentCommitDate = line.split(' ')[1];
136 |     } else if (line && currentCommitDate) {
137 |       const filename = join(slug, line)
138 |       if (!await exists(filename)) {
139 |         // the file used to exist but has been deleted
140 |         console.warn("skipping deleted: ", filename, line)
141 |         continue
142 |       } else {
143 |         console.warn("%cadding: ", 'color:green', filename, line)
144 |       }
145 | 
146 |       const repo_metadata = JSON.parse(await Deno.readTextFile(join(slug, 'metadata.json')))
147 | 
148 |       const _stem = stem(filename).join('.')
149 |       const README = await extractMarkdownSection(join(slug, 'README.md'), _stem);
150 |       const birthtime = new Date(currentCommitDate!);
151 |       const avatar = repo_metadata.avatar
152 |       const fullname = join(dirname(slug), _stem)
153 |       const url = repo_metadata.url +'/scripts/' + basename(filename)
154 |       const description = README
155 |         ? extract_description(README)
156 |         : fullname == 'pkgxdev/demo-test-pattern'
157 |           ? 'Prints a test pattern to your console'
158 |           : undefined
159 |       const cmd = `mash ${_stem}`
160 | 
161 |       rv.push({ fullname, birthtime, description, avatar, url, README, cmd })
162 |     }
163 |   }
164 | 
165 |   return rv;
166 | 
167 |   function stem(filename: string): string[] {
168 |     const base = basename(filename)
169 |     const parts = base.split('.')
170 |     if (parts.length == 1) {
171 |       return parts.slice(0, 1)
172 |     } else {
173 |       return parts.slice(0, -1) // no extension, but allow eg. foo.bar.js to be foo.bar
174 |     }
175 |   }
176 | }
177 | 
178 | function extract_description(input: string) {
179 |   const regex = /^(.*?)\n#|^.*$/ms;
180 |   const match = regex.exec(input);
181 |   return match?.[1]?.trim();
182 | }
183 | 
--------------------------------------------------------------------------------
/.github/scripts/trawl.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S pkgx deno^1 run --allow-run --allow-net --allow-env=GH_TOKEN --allow-write=.
2 | 
3 | import * as flags from "https://deno.land/std@0.206.0/flags/mod.ts";
4 | 
5 | const args = flags.parse(Deno.args);
6 | const outdir = args['out']
7 | 
8 | const ghToken = Deno.env.get("GH_TOKEN");
9 | if (!ghToken) {
10 |   console.error("error: GitHub token is required.
Set the GH_TOKEN environment variable."); 11 | Deno.exit(1) 12 | } 13 | 14 | Deno.mkdirSync(outdir, { recursive: true }); 15 | 16 | async function cloneAllForks(user: string, repo: string) { 17 | let page = 1; 18 | while (true) { 19 | const response = await fetch(`https://api.github.com/repos/${user}/${repo}/forks?page=${page}`, { 20 | headers: { 21 | "Authorization": `token ${ghToken}` 22 | } 23 | }); 24 | 25 | if (!response.ok) { 26 | throw new Error(`err: ${response.statusText}`); 27 | } 28 | 29 | const forks = await response.json(); 30 | if (forks.length === 0) { 31 | break; // No more forks 32 | } 33 | 34 | for (const fork of forks) { 35 | await clone(fork) 36 | 37 | Deno.writeTextFileSync(`${outdir}/${fork.full_name}/metadata.json`, JSON.stringify({ 38 | stars: fork.stargazers_count, 39 | license: fork.license?.spdx_id, 40 | avatar: fork.owner.avatar_url, 41 | url: fork.html_url + '/blob/' + fork.default_branch 42 | }, null, 2)) 43 | } 44 | 45 | page++; 46 | } 47 | } 48 | 49 | async function clone({clone_url, full_name, ...fork}: any) { 50 | console.log(`Cloning ${clone_url}...`); 51 | const proc = new Deno.Command("git", { args: ["-C", outdir, "clone", clone_url, full_name]}).spawn() 52 | if (!(await proc.status).success) { 53 | throw new Error(`err: ${await proc.status}`) 54 | } 55 | } 56 | 57 | await cloneAllForks('pkgxdev', 'mash'); 58 | 59 | // we have some general utility scripts here 60 | await clone({clone_url: 'https://github.com/pkgxdev/mash.git', full_name: 'pkgxdev/mash'}); 61 | // deploy expects this and fails otherwise 62 | Deno.writeTextFileSync(`${outdir}/pkgxdev/mash/metadata.json`, `{ 63 | "stars": 0, 64 | "license": "Apache-2.0", 65 | "avatar": "https://avatars.githubusercontent.com/u/140643783?v=4", 66 | "url": "https://github.com/pkgxdev/mash/blob/main" 67 | }`) 68 | -------------------------------------------------------------------------------- /.github/workflows/cd.installer.yml: -------------------------------------------------------------------------------- 1 | name: cd·installer 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - .github/workflows/cd.installer.yml 7 | push: 8 | branches: main 9 | paths: 10 | - installer.sh 11 | - .github/workflows/cd.installer.yml 12 | 13 | jobs: 14 | cd: 15 | if: github.repository == 'pkgxdev/mash' 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - uses: aws-actions/configure-aws-credentials@v4 21 | with: 22 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 23 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 24 | aws-region: us-east-2 25 | 26 | - run: aws s3 cp ./installer.sh s3://${{ secrets.AWS_S3_BUCKET }}/installer.sh 27 | 28 | - run: aws cloudfront create-invalidation 29 | --distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }} 30 | --paths / /installer.sh 31 | 32 | test: 33 | needs: cd 34 | runs-on: ubuntu-latest 35 | steps: 36 | - run: curl -L mash.pkgx.sh | sh 37 | - run: mash demo test-pattern 38 | -------------------------------------------------------------------------------- /.github/workflows/cd.www.yml: -------------------------------------------------------------------------------- 1 | name: cd·www 2 | 3 | on: 4 | push: 5 | branches: main 6 | paths: 7 | - .github/workflows/deploy.yml 8 | - .github/scripts/* 9 | - scripts/* 10 | pull_request: 11 | paths: 12 | - .github/workflows/deploy.yml 13 | schedule: 14 | - cron: '23 * * * *' 15 | workflow_dispatch: 16 | 17 | concurrency: 18 | group: ${{ github.workflow }}-${{ github.ref }} 19 | cancel-in-progress: true 20 | 
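# build: trawl.ts clones every fork of pkgxdev/mash, index.ts generates
# index.json from the cloned scripts, the latest released mash script is
# downloaded, and build.ts assembles the static site; deploy then publishes
# the resulting artifact to GitHub Pages.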
21 | jobs: 22 | build: 23 | if: github.repository == 'pkgxdev/mash' 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v4 27 | - uses: pkgxdev/setup@v4 28 | 29 | - name: trawl 30 | run: .github/scripts/trawl.ts --out ./build 31 | env: 32 | GH_TOKEN: ${{ github.token }} 33 | 34 | - name: index 35 | run: | 36 | mkdir out 37 | .github/scripts/index.ts --input ./build > ./out/index.json 38 | 39 | - uses: robinraju/release-downloader@v1.12 40 | with: 41 | latest: true 42 | fileName: mash-*.sh 43 | 44 | - run: mv mash-*.sh ./out/mash.sh 45 | 46 | - name: build 47 | run: .github/scripts/build.ts --input ./build --output ./out --index-json ./out/index.json 48 | 49 | - uses: actions/configure-pages@v4 50 | - uses: actions/upload-pages-artifact@v3 51 | with: 52 | path: out 53 | 54 | deploy: 55 | needs: build 56 | runs-on: ubuntu-latest 57 | if: ${{ github.event_name != 'pull_request' }} 58 | environment: 59 | name: github-pages 60 | url: ${{ steps.deployment.outputs.page_url }} 61 | permissions: 62 | pages: write # to deploy to Pages 63 | id-token: write # to verify the deployment originates from an appropriate source 64 | steps: 65 | - uses: actions/deploy-pages@v4 66 | id: deployment 67 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: cd·vx 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | concurrency: 9 | group: cd/vx/${{ github.event.release.tag_name }} 10 | cancel-in-progress: true 11 | 12 | permissions: 13 | contents: write 14 | 15 | jobs: 16 | retag: 17 | if: github.repository == 'pkgxdev/mash' 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v4 21 | - uses: fischerscode/tagger@v0 22 | with: 23 | prefix: v 24 | - run: | 25 | git tag -f latest 26 | git push origin latest --force 27 | 28 | attach: 29 | if: github.repository == 'pkgxdev/mash' 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v4 33 | 34 | - name: version 35 | run: sed -i "s/mash 0.0.0-dev/mash ${{ github.event.release.tag_name }}/g" ./mash 36 | 37 | - name: prep 38 | run: | 39 | mkdir out 40 | mv ./mash ./out/mash-${{ github.event.release.tag_name }}.sh 41 | 42 | - name: attach 43 | run: gh release upload ${{ github.event.release.tag_name }} ./out/mash-${{ github.event.release.tag_name }}.sh 44 | env: 45 | GH_TOKEN: ${{ github.token }} -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | pull_request: 5 | paths: mash 6 | 7 | jobs: 8 | test: 9 | if: github.repository == 'pkgxdev/mash' 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - uses: pkgxdev/setup@v4 14 | - run: ./mash demo test-pattern 15 | - run: ./mash demo test-pattern # check cache route works too 16 | - run: ./mash pkgxdev/demo-test-pattern # check fully qualified 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | /out 3 | /build 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "deno.enable": true, 3 | "deno.lint": true, 4 | "deno.unstable": true 5 | } 6 | 
-------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 
180 |    To apply the Apache License to your work, attach the following
181 |    boilerplate notice, with the fields enclosed by brackets "[]"
182 |    replaced with your own identifying information. (Don't include
183 |    the brackets!) The text should be enclosed in the appropriate
184 |    comment syntax for the file format. We also recommend that a
185 |    file or class name and description of purpose be included on the
186 |    same "printed page" as the copyright notice for easier
187 |    identification within third-party archives.
188 | 
189 |    Copyright 2022–23 pkgx inc.
190 | 
191 |    Licensed under the Apache License, Version 2.0 (the "License");
192 |    you may not use this file except in compliance with the License.
193 |    You may obtain a copy of the License at
194 | 
195 |        http://www.apache.org/licenses/LICENSE-2.0
196 | 
197 |    Unless required by applicable law or agreed to in writing, software
198 |    distributed under the License is distributed on an "AS IS" BASIS,
199 |    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 |    See the License for the specific language governing permissions and
201 |    limitations under the License.
202 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # `mash`
2 | 
3 | mash up millions of open source packages into monstrously powerful scripts.
4 | 
5 | > [!CAUTION]
6 | >
7 | > We have not vetted any of the scripts `mash` can run and (currently) they
8 | > can do anything they want to your computer.
9 | >
10 | > We fully intend to add sandboxing and user reporting, but you have found
11 | > `mash` super early in its life so you must practice caution in your usage.
12 | >
13 | > All scripts can be read in advance via [mash.pkgx.sh]
14 | 
15 | &nbsp;
16 | 
17 | ## Quick Start
18 | 
19 | ```sh
20 | brew install pkgxdev/made/mash || curl https://pkgx.sh | sh
21 | ```
22 | 
23 | > [!NOTE]
24 | > `mash` is a plain POSIX script. All it needs is `bash`, `curl`, and `pkgx`.
25 | > So if you like, install the deps and just download it by itself.
26 | 
27 | ## Getting Started
28 | 
29 | Visit [mash.pkgx.sh] to see what scripts are available. Once you’ve found a
30 | script you want to run:
31 | 
32 | ```sh
33 | mash transcribe --help # or https://mash.pkgx.sh/mxcl/transcribe
34 | ```
35 | 
36 | &nbsp;
37 | 
38 | ## Contributing Scripts
39 | 
40 | ### Writing Scripts
41 | 
42 | Use any shell or scripting language you like. You specify it with the shebang:
43 | 
44 | ```sh
45 | #!/usr/bin/env -S pkgx ruby
46 | ```
47 | 
48 | Generally it is sensible to specify constrained versions:
49 | 
50 | ```sh
51 | #!/usr/bin/env -S pkgx python@3.11
52 | ```
53 | 
54 | ### Naming Scripts
55 | 
56 | Names are first-come, first-served. Please practice taste. We reserve the
57 | right to manage names.
58 | 
59 | > [!TIP]
60 | > Extensions (eg. `.sh`, `.ts`) are *recommended* for GitHub readability.
61 | > They will be stripped from the mash execution name, eg. `foo-bar.ts` is
62 | > invoked via `mash foo-bar` and not `mash foo-bar.ts`
63 | 
64 | ### Installing Language Dependencies
65 | 
66 | Many languages or interpreters nowadays provide clear methods for importing
67 | language dependencies inside scripts, eg. `deno`, or `bun`. For other
68 | languages, read on.
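For example, a Deno-based script needs nothing beyond its URL imports. A minimal sketch (the imported module, version, and script body here are illustrative, not something `mash` requires):

```ts
#!/usr/bin/env -S pkgx deno^1 run

// deno fetches and caches the imported module on first run
import { parse } from "https://deno.land/std@0.206.0/flags/mod.ts";

const args = parse(Deno.args);
console.log(`hello, ${args.name ?? "world"}`);
```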
69 | 
70 | #### Ruby
71 | 
72 | Use [Bundler](https://bundler.io):
73 | 
74 | ```ruby
75 | #!/usr/bin/env -S pkgx ruby@3
76 | 
77 | require 'bundler/inline'
78 | 
79 | gemfile do
80 |   source 'https://rubygems.org'
81 |   gem 'ruby-macho', '~> 3'
82 | end
83 | ```
84 | 
85 | #### Python, Go, Rust, Node, etc.
86 | 
87 | Typically for everything else, use [`scriptisto`], eg. for Python:
88 | 
89 | ```python
90 | #!/usr/bin/env -S pkgx +python@3.12 +virtualenv scriptisto
91 | 
92 | # snip… type `scriptisto new python-pip` for the rest.
93 | ```
94 | 
95 | Use `scriptisto new` for a full listing of platforms Scriptisto makes
96 | available.
97 | 
98 | 
99 | ### Making your scripts available to `mash`
100 | 
101 | 1. Fork [pkgxdev/mash]
102 | 2. Add scripts to `./scripts/`
103 | 3. Optionally edit the README adding a description
104 | 4. Push to your fork
105 | 5. Wait an hour and then check [mash.pkgx.sh]
106 | 
107 | > [!NOTE]
108 | > Do not create a pull request for your scripts against this repo!
109 | > *We index the fork graph*.
110 | 
111 | > [!IMPORTANT]
112 | > Step 3 (edit the README) is not optional if you want your script to appear
113 | > on the [mash frontpage][mash.pkgx.sh]!
114 | 
115 | ### Running Your Scripts
116 | 
117 | Assuming a script named `foo-bar`, while debugging just:
118 | 
119 | ```sh
120 | chmod +x scripts/foo-bar
121 | ./scripts/foo-bar
122 | ```
123 | 
124 | After pushing, we will index your script within 60 minutes.
125 | Once indexed your script can be run with:
126 | 
127 | 1. `mash foo bar`; or
128 | 2. `mash your-username/foo-bar`
129 | 
130 | > [!IMPORTANT]
131 | > `mash` will not be able to run your script until it is indexed.
132 | > If you can visit https://mash.pkgx.sh/USERNAME/SCRIPT-NAME then your
133 | > script has been indexed.
134 | 
135 | > [!NOTE]
136 | > ### Naming Guidelines: A Call for Consideration
137 | > Think a little about the names you are picking. We reserve the right
138 | > to rename egregious abuse of names and/or namespaces. If you feel a script
139 | > is misnamed, open a ticket for discussion.
140 | 
141 | &nbsp;
142 | 
143 | 
144 | ## Anatomy of Scripts
145 | 
146 | Thanks to [`pkgx`], `mash` scripts can be written in any scripting language
147 | using any packages in the entire open source ecosystem.
148 | 
149 | ### The Shebang
150 | 
151 | The shebang is where you instruct `pkgx` on what scripting language you want.
152 | For example, if you want to write your script in `fish`:
153 | 
154 | ```sh
155 | #!/usr/bin/env -S pkgx fish
156 | ```
157 | 
158 | You can also use pkgx `+pkg` syntax to add additional packages to the script’s
159 | running environment:
160 | 
161 | ```sh
162 | #!/usr/bin/env -S pkgx +gh +git +gum +bpb bash
163 | ```
164 | 
165 | pkgx knows what packages to cache (it doesn’t pollute the user system with
166 | installs) based on the commands you want to run. There’s no figuring out
167 | pkg names, just type what you would type to run the command.
168 | 
169 | > https://docs.pkgx.sh/scripts
170 | 
171 | ### Documenting Your Script
172 | 
173 | Rewrite the README in your fork so there is a `## mash scriptname`
174 | section. If your script is not the first to get a name then you would do
175 | `## mash username/scriptname` instead.
176 | 
177 | * The paragraph after the `##` will be the [mash.pkgx.sh] description
178 | * Keep it short or it’ll get truncated when we display it
179 | * If you add a `### Usage` section we’ll list it on the web
180 | 
181 | > [!IMPORTANT]
182 | > If you don’t provide a description your script won’t be listed on the
183 | > [mash frontpage][mash.pkgx.sh] (but the scripts can still be run by `mash`).
184 | 
185 | ### Example Fork
186 | 
187 | https://github.com/mxcl/mash
188 | 
189 | &nbsp;
190 | 
191 | 
192 | ## Appendix
193 | 
194 | `mash` has no secret sauce; users can just cURL your scripts and run them
195 | directly via `pkgx`:
196 | 
197 | ```sh
198 | curl -O https://raw.githubusercontent.com/mxcl/mash/main/scripts/transcribe
199 | pkgx ./transcribe
200 | ```
201 | 
202 | Even `pkgx` isn’t required, they can source the dependencies themselves and
203 | run the script manually:
204 | 
205 | ```sh
206 | $ bash ./transcribe
207 | # ^^ they will need to read the script to determine deps and interpreter
208 | ```
209 | 
210 | Hackers can use your script without installing `pkgx` or `mash` first via our
211 | cURL one-liner. This executes the script but doesn’t install anything:
212 | 
213 | ```sh
214 | sh <(curl https://mash.pkgx.sh) transcribe
215 | ```
216 | 
217 | 
218 | [mash.pkgx.sh]: https://mash.pkgx.sh
219 | [pkgxdev/mash]: https://github.com/pkgxdev/mash
220 | [`pkgx` shebang]: https://docs.pkgx.sh/scripts
221 | [`pkgx`]: https://pkgx.sh
222 | [`scriptisto`]: https://github.com/igor-petruk/scriptisto
223 | [actions]: https://github.com/pkgxdev/mash/actions
224 | 
--------------------------------------------------------------------------------
/installer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | set -e
4 | 
5 | if [ -n "$VERBOSE" -o -n "$GITHUB_ACTIONS" -a -n "$RUNNER_DEBUG" ]; then
6 |   set -x
7 | fi
8 | 
9 | if command -v mash >/dev/null; then
10 |   if [ $# -gt 0 ]; then
11 |     exec mash "$@"
12 |   else
13 |     echo "mash: already installed: $(which mash)" 1>&2
14 |     exit 0
15 |   fi
16 | fi
17 | 
18 | if ! command -v pkgx >/dev/null; then
19 |   if [ $# -gt 0 ]; then
20 |     exec curl -Ssf https://pkgx.sh | sh -s -- +pkgx.sh/mash -- mash "$@"
21 |   else
22 |     curl -Ssf https://pkgx.sh | sh
23 |   fi
24 | fi
25 | 
26 | if [ $# -gt 0 ]; then
27 |   exec pkgx +pkgx.sh/mash -- mash "$@"
28 | else
29 |   tmp="$(mktemp)"
30 |   curl -Ssf https://pkgxdev.github.io/mash/mash.sh > $tmp
31 |   sudo install -m 0755 "$tmp" /usr/local/bin/mash
32 |   echo "now type: mash" 1>&2
33 | fi
34 | 
--------------------------------------------------------------------------------
/mash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S pkgx --quiet bash
2 | 
3 | set -eo pipefail
4 | 
5 | if [ "$1" == --version ]; then
6 |   echo "mash 0.0.0-dev"
7 |   exit 0
8 | fi
9 | 
10 | if [ -z "$1" -o "$1" == "--help" ]; then
11 |   echo "mash <script> [args...]" 1>&2
12 |   if [ "$1" == "--help" ]; then
13 |     exit 0
14 |   else
15 |     exit 1
16 |   fi
17 | fi
18 | 
19 | if [ -n "$RUNNER_DEBUG" -a -n "$GITHUB_ACTIONS" ] || [ -n "$VERBOSE" ]; then
20 |   set -x
21 | fi
22 | 
23 | if ! command -v pkgx >/dev/null; then
24 |   echo "error: pkgx not found" 1>&2
25 |   exit 1
26 | fi
27 | if ! command -v curl >/dev/null; then
28 |   curl() {
29 |     pkgx --quiet curl "$@"
30 |   }
31 | fi
32 | 
33 | run() {
34 |   SCRIPTNAME=$1
35 |   shift
36 | 
37 |   # github won’t give us `github.com/pkgx` because they are meanies
38 |   if [[ $SCRIPTNAME == u/pkgx/* ]]; then
39 |     SCRIPTNAME="${SCRIPTNAME/pkgx/pkgxdev}"
40 |   fi
41 | 
42 |   if [ "$(uname)" = Darwin ]; then
43 |     CACHE="${XDG_CACHE_HOME:-$HOME/Library/Caches}/mash/$SCRIPTNAME"
44 |   else
45 |     CACHE="${XDG_CACHE_HOME:-$HOME/.cache}/mash/$SCRIPTNAME"
46 |   fi
47 | 
48 |   get_etag() {
49 |     grep -i ETag "$CACHE/headers.txt" | sed -e 's/ETag: "\(.*\)"/\1/I' | tr -d '\r'
50 |   }
51 | 
52 |   if [ -f "$CACHE/headers.txt" ] && ETAG=$(get_etag); then
53 |     ETAG=(--header If-None-Match:\ $ETAG)
54 |   else
55 |     mkdir -p "$CACHE"
56 |   fi
57 | 
58 |   URL="https://pkgxdev.github.io/mash/$SCRIPTNAME"
59 | 
60 |   if curl \
61 |     "${ETAG[@]}" \
62 |     --silent \
63 |     --fail \
64 |     --show-error \
65 |     --dump-header "$CACHE/headers.txt" \
66 |     --output "$CACHE/script" \
67 |     "$URL"
68 |   then
69 |     chmod +x "$CACHE/script"
70 |     exec "$CACHE/script" "$@"
71 |   elif [ -f "$CACHE/script" ]; then
72 |     echo "warn: couldn’t check for updates" 1>&2
73 |     exec "$CACHE/script" "$@"
74 |   else
75 |     echo "error: $URL" 1>&2
76 |     exit 2
77 |   fi
78 | }
79 | 
80 | cmd=$1
81 | shift
82 | 
83 | case "$cmd-$1" in
84 | tea-erc20|media-yt2mp3|pantry-checker|tmux-sessionizer|demo-test-pattern)
85 |   cmd="$cmd-$1"
86 |   shift;;
87 | *)
88 |   if [[ "$cmd" == pkgx/* || "$cmd" == pkgxdev/* ]]; then
89 |     cmd="mxcl/$(basename $cmd)"
90 |   fi
91 | 
92 |   if [[ "$cmd" == */* ]]; then
93 |     cmd="u/$cmd"
94 |   fi
95 | esac
96 | 
97 | run $cmd "$@"
98 | 
--------------------------------------------------------------------------------
/tea.yaml:
--------------------------------------------------------------------------------
1 | # https://tea.xyz/what-is-this-file
2 | ---
3 | version: 1.0.0
4 | codeOwners:
5 |   - '0x5E2DE4A68df811AAAD32d71fb065e6946fA5C8d9' # mxcl
6 |   - '0xAb9A89fA4Bbd04Fc37116F0d7766866D001EA704' # jhheider
7 | quorum: 1
8 | 
--------------------------------------------------------------------------------