├── fixtures ├── projects │ ├── bar.com │ │ └── package.yml │ ├── unicode.org │ │ └── package.yml │ ├── npmjs.com │ │ ├── package.yml │ │ └── provider.yml │ ├── foo.com │ │ └── package.yml │ └── python.org │ │ └── package.yml ├── pathtests │ └── readYAMLAll.yaml ├── foo.com-5.43.0.tgz └── npm-integration-test │ ├── index.js │ └── package.json ├── .vscode ├── settings.json └── launch.json ├── vendor ├── sqlite3@0.10.0 │ ├── mod.ts │ └── src │ │ ├── util.ts │ │ ├── constants.ts │ │ └── ffi.ts └── README.md ├── examples ├── awscli │ ├── package.json │ └── index.mjs ├── whisper.mjs ├── whisper.js └── whisper.ts ├── .gitignore ├── .github └── workflows │ ├── cd.vx.yml │ └── ci.yml ├── scripts └── run-coverage.sh ├── src ├── hooks │ ├── useMoustaches.test.ts │ ├── useCache.ts │ ├── useFetch.ts │ ├── useFetch.test.ts │ ├── useOffLicense.ts │ ├── useShellEnv.test.ts │ ├── useSyncCache.test.ts │ ├── useCache.test.ts │ ├── useCellar.test.ts │ ├── useDownload.test.ts │ ├── useTestConfig.ts │ ├── useInventory.ts │ ├── useSync.test.ts │ ├── useMoustaches.ts │ ├── useInventory.test.ts │ ├── useConfig.test.ts │ ├── usePantry.test.ts │ ├── useCellar.ts │ ├── useSync.ts │ ├── useDownload.ts │ ├── useConfig.ts │ ├── useShellEnv.ts │ └── useSyncCache.ts ├── utils │ ├── error.ts │ ├── flock.ts │ ├── error.test.ts │ ├── pkg.ts │ ├── read-lines.ts │ ├── host.ts │ ├── host.test.ts │ ├── pkg.test.ts │ ├── misc.test.ts │ ├── misc.ts │ ├── Path.test.ts │ ├── semver.ts │ └── semver.test.ts ├── plumbing │ ├── which.test.ts │ ├── link.test.ts │ ├── resolve.ts │ ├── link.ts │ ├── install.test.ts │ ├── hydrate.test.ts │ ├── which.ts │ ├── install.ts │ ├── hydrate.ts │ └── resolve.test.ts ├── deps.ts ├── types.ts └── porcelain │ ├── run.test.ts │ ├── install.test.ts │ ├── install.ts │ └── run.ts ├── deno.json ├── mod.ts ├── README.md └── LICENSE.txt /fixtures/projects/bar.com/package.yml: -------------------------------------------------------------------------------- 1 | foo: bar 2 | 
-------------------------------------------------------------------------------- /fixtures/pathtests/readYAMLAll.yaml: -------------------------------------------------------------------------------- 1 | abc: xyz 2 | --- 3 | ijk: lmn -------------------------------------------------------------------------------- /fixtures/foo.com-5.43.0.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pkgxdev/libpkgx/HEAD/fixtures/foo.com-5.43.0.tgz -------------------------------------------------------------------------------- /fixtures/projects/unicode.org/package.yml: -------------------------------------------------------------------------------- 1 | build: 2 | # this node to prevent failure when parsing null YAML 3 | -------------------------------------------------------------------------------- /fixtures/projects/npmjs.com/package.yml: -------------------------------------------------------------------------------- 1 | provides: 2 | - bin/npm 3 | 4 | dependencies: 5 | unicode.org: ^73 6 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "deno.enable": true, 3 | "deno.lint": true, 4 | "deno.unstable": true, 5 | "deno.config": "deno.json" 6 | } 7 | -------------------------------------------------------------------------------- /vendor/sqlite3@0.10.0/mod.ts: -------------------------------------------------------------------------------- 1 | export * from "./src/database.ts"; 2 | export * from "./src/statement.ts"; 3 | export { SqliteError } from "./src/util.ts"; 4 | -------------------------------------------------------------------------------- /fixtures/projects/npmjs.com/provider.yml: -------------------------------------------------------------------------------- 1 | cmds: 2 | - truffle 3 | - chalk 4 | args: 5 | chalk: npx --yes --package=chalk-cli -- 6 | 
...: [npx, --yes, --] 7 | -------------------------------------------------------------------------------- /examples/awscli/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pkgx-aws-cli", 3 | "main": "index.mjs", 4 | "dependencies": { 5 | "libpkgx": "^0.14", 6 | "aws-cli-js": "^2.2.3" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /fixtures/projects/foo.com/package.yml: -------------------------------------------------------------------------------- 1 | runtime: 2 | env: 3 | BAZ: ${{deps.bar.com.prefix}}/baz 4 | 5 | dependencies: 6 | bar.com: ^1 7 | 8 | provides: 9 | - bin/foo 10 | -------------------------------------------------------------------------------- /vendor/README.md: -------------------------------------------------------------------------------- 1 | # sqlite3@0.10.0 2 | 3 | vendored and modified to not download their binary of sqlite and instead be 4 | customizable to use our own. 
5 | 6 | https://github.com/denodrivers/sqlite3/issues/119 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | /deno.lock 4 | /dist 5 | /cov_profile 6 | /cov_profile.lcov 7 | /examples/jfk.wav* 8 | /examples/node_modules 9 | /examples/package.json 10 | 11 | package-lock.json 12 | node_modules 13 | -------------------------------------------------------------------------------- /fixtures/projects/python.org/package.yml: -------------------------------------------------------------------------------- 1 | provides: 2 | - bin/python 3 | 4 | interprets: 5 | extensions: py 6 | args: python 7 | 8 | platforms: 9 | - darwin 10 | 11 | dependencies: 12 | unicode.org: ^71 13 | -------------------------------------------------------------------------------- /fixtures/npm-integration-test/index.js: -------------------------------------------------------------------------------- 1 | const { porcelain: { run } } = require("libpkgx") 2 | const { ConfigDefault } = require("libpkgx/hooks/useConfig") 3 | const { ConsoleLogger } = require("libpkgx/plumbing/install") 4 | 5 | console.log(ConfigDefault(), ConsoleLogger()) 6 | 7 | run("ls -h") 8 | -------------------------------------------------------------------------------- /fixtures/npm-integration-test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "npm-integration-test", 3 | "version": "1.0.0", 4 | "description": "used in ci/cd to verify our npm library imports without errors", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "node index.js" 8 | }, 9 | "dependencies": { 10 | "libpkgx": "file:../../dist" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.github/workflows/cd.vx.yml: -------------------------------------------------------------------------------- 1 | 
name: cd·vx 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | concurrency: 9 | group: cd/vx/${{ github.event.release.tag_name }} 10 | cancel-in-progress: true 11 | 12 | permissions: 13 | contents: write 14 | 15 | jobs: 16 | retag: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | - uses: fischerscode/tagger@v0 21 | with: 22 | prefix: v 23 | -------------------------------------------------------------------------------- /examples/whisper.mjs: -------------------------------------------------------------------------------- 1 | /* 2 | npm install libpkgx 3 | node whisper.mjs 4 | */ 5 | 6 | import { porcelain } from "libpkgx" 7 | import https from "node:https" 8 | const { run } = porcelain 9 | import fs from "node:fs" 10 | 11 | const url = 'https://raw.githubusercontent.com/ggerganov/whisper.cpp/raw/master/samples/jfk.wav' 12 | 13 | await new Promise(done => 14 | https.get(url, rsp => 15 | rsp.pipe(fs.createWriteStream("jfk.wav")).on('finish', done))) 16 | 17 | await run("whisper.cpp jfk.wav") 18 | -------------------------------------------------------------------------------- /examples/whisper.js: -------------------------------------------------------------------------------- 1 | /* 2 | npm install libpkgx 3 | node whisper.js 4 | */ 5 | 6 | const { porcelain } = require("libpkgx") 7 | const https = require("node:https") 8 | const { run } = porcelain 9 | const fs = require("node:fs") 10 | 11 | const url = 'https://github.com/ggerganov/whisper.cpp/raw/master/samples/jfk.wav' 12 | 13 | const fetch = new Promise(done => 14 | https.get(url, rsp => 15 | rsp.pipe(fs.createWriteStream("jfk.wav")).on('finish', done))) 16 | 17 | fetch.then(() =>run("whisper.cpp jfk.wav")) 18 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "request": 
"launch", 6 | "name": "Debug Test", 7 | "type": "node", 8 | "cwd": "${workspaceFolder}", 9 | "runtimeExecutable": "deno", 10 | "runtimeArgs": [ 11 | "test", 12 | "--unstable", 13 | "--inspect-brk", 14 | "--allow-all", 15 | "${file}", 16 | ], 17 | "attachSimplePort": 9229 18 | } 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /scripts/run-coverage.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S pkgx +genhtml +deno bash 2 | 3 | # TODO use git to determine file changes and open those reports 4 | 5 | set -eo pipefail 6 | 7 | rm -rf cov_profile 8 | deno task test --fail-fast --coverage=cov_profile 9 | deno coverage cov_profile --lcov --output=cov_profile/coverage_file.lcov 10 | genhtml -o cov_profile cov_profile/coverage_file.lcov 11 | 12 | if test "$1" != "--reload"; then 13 | open cov_profile/index.html 14 | else 15 | osascript -e 'tell application "Safari" to tell the current tab of the front window to do JavaScript "location.reload()"' 16 | fi 17 | -------------------------------------------------------------------------------- /src/hooks/useMoustaches.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals } from "@std/assert" 2 | import { useTestConfig } from "./useTestConfig.ts" 3 | import useMoustaches from "./useMoustaches.ts" 4 | import { Package } from "../types.ts" 5 | import SemVer from "../utils/semver.ts" 6 | 7 | Deno.test("useMoustaches", () => { 8 | const conf = useTestConfig() 9 | const moustaches = useMoustaches() 10 | 11 | const pkg: Package = { 12 | project: "pkgx.sh/test", 13 | version: new SemVer("1.0.0") 14 | } 15 | 16 | const tokens = moustaches.tokenize.all(pkg, []) 17 | assertEquals(tokens[0].to, conf.prefix.join(`pkgx.sh/test/v${pkg.version}`).string) 18 | }) 19 | -------------------------------------------------------------------------------- /src/hooks/useCache.ts: 
-------------------------------------------------------------------------------- 1 | import { Stowage } from "../types.ts" 2 | import useConfig from "./useConfig.ts" 3 | import host from "../utils/host.ts" 4 | 5 | export default function useCache() { 6 | return { path } 7 | } 8 | 9 | const path = (stowage: Stowage) => { 10 | const { pkg, type } = stowage 11 | const stem = pkg.project.replaceAll("/", "∕") 12 | 13 | let filename = `${stem}-${pkg.version}` 14 | if (type == 'bottle') { 15 | const { platform, arch } = stowage.host ?? host() 16 | filename += `+${platform}+${arch}.tar.${stowage.compression}` 17 | } else { 18 | filename += stowage.extname 19 | } 20 | 21 | return useConfig().cache.join(filename) 22 | } 23 | -------------------------------------------------------------------------------- /src/utils/error.ts: -------------------------------------------------------------------------------- 1 | // deno-lint-ignore-file no-explicit-any 2 | 3 | export function panic(message?: string): never { 4 | throw new Error(message) 5 | } 6 | 7 | declare global { 8 | interface Promise { 9 | swallow(errorClass?: new (...args: any) => any): Promise 10 | } 11 | } 12 | 13 | Promise.prototype.swallow = function(errorClass?: new (...args: any) => any) { 14 | return this.catch((err: unknown) => { 15 | if (errorClass && !(err instanceof errorClass)) { 16 | throw err; 17 | } 18 | }) 19 | } 20 | 21 | export class PkgxError extends Error { 22 | ctx: any 23 | constructor(msg: string, ctx?: any) { 24 | super(msg) 25 | this.ctx = ctx 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowJs": false, 4 | "strict": true 5 | }, 6 | "pkgx": "deno~2.0", 7 | "tasks": { 8 | "test": "deno test --parallel --unstable-fs --unstable-ffi --allow-all", 9 | "typecheck": "deno check ./mod.ts" 10 | }, 11 | "lint": { 12 | 
"include": ["src/"], 13 | "exclude": ["**/*.test.ts"] 14 | }, 15 | "test": { 16 | "include": ["src/"], 17 | "exclude": ["dist/"] 18 | }, 19 | "imports": { 20 | "@std/assert": "jsr:@std/assert@^1.0.6", 21 | "@std/testing": "jsr:@std/testing@^1.0.3", 22 | "is-what": "https://deno.land/x/is_what@v4.1.15/src/index.ts", 23 | "outdent": "https://deno.land/x/outdent@v0.8.0/mod.ts" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /examples/awscli/index.mjs: -------------------------------------------------------------------------------- 1 | import * as pkgx from 'libpkgx'; 2 | import * as awsclijs from 'aws-cli-js'; 3 | 4 | const { Options, Aws } = awsclijs 5 | const { porcelain: { install }, hooks: { useShellEnv } } = pkgx 6 | 7 | const opts = new Options(process.env.AWS_ACCESS_KEY_ID, process.env.AWS_SECRET_ACCESS_KEY) 8 | 9 | await installAwsCli() 10 | 11 | const aws = new Aws(opts) 12 | const users = await aws.command('iam list-users') 13 | 14 | console.log(users) 15 | 16 | /////////////////////////////////////////// 17 | async function installAwsCli() { 18 | const { map, flatten } = useShellEnv() 19 | const installations = await install('aws.amazon.com/cli') 20 | Object.assign(process.env, flatten(await map({ installations }))) 21 | } 22 | -------------------------------------------------------------------------------- /src/hooks/useFetch.ts: -------------------------------------------------------------------------------- 1 | import useConfig from "./useConfig.ts" 2 | 3 | // useFetch wraps the native Deno fetch api and inserts a User-Agent header 4 | export default function useFetch(input: string | URL | Request, init?: RequestInit | undefined): Promise { 5 | const { UserAgent } = useConfig() 6 | const requestInit = init ?? 
{} as RequestInit 7 | if (UserAgent) { 8 | requestInit.headers = { ...requestInit.headers, "User-Agent": UserAgent } 9 | } 10 | return _internals.fetch(input, requestInit) 11 | } 12 | 13 | // wrapped or DNT chokes on typechecking 14 | function chew(input: string | URL | Request, init?: RequestInit | undefined): Promise { 15 | return fetch(input, init) 16 | } 17 | 18 | export const _internals = { 19 | fetch: chew 20 | } 21 | -------------------------------------------------------------------------------- /src/utils/flock.ts: -------------------------------------------------------------------------------- 1 | import Path from "./Path.ts" 2 | 3 | export async function flock(path: Path) { 4 | let opts: Deno.OpenOptions | undefined 5 | 6 | // Deno.open seems to not like opening directories on Windows 7 | // even though my research suggests it should be fine 8 | if (Deno.build.os == 'windows') { 9 | path = path.join("lockfile") 10 | opts = { write: true, create: true } 11 | // ^^ or flock fails, NOTE that we have to pass create and the file must be created! lol wut? 
12 | // ^^ write is also necessary 13 | } 14 | 15 | const file = await Deno.open(path.string, opts) 16 | 17 | await file.lock(true) 18 | 19 | return async () => { 20 | await file.unlock() 21 | file.close() 22 | if (Deno.build.os == 'windows') path.rm() 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/hooks/useFetch.test.ts: -------------------------------------------------------------------------------- 1 | import { stub, assertSpyCallArgs } from "@std/testing/mock" 2 | import { useTestConfig } from "./useTestConfig.ts"; 3 | import useFetch, { _internals } from "./useFetch.ts" 4 | 5 | 6 | Deno.test({ 7 | name: "fetch user-agent header check", 8 | async fn() { 9 | const UserAgent = "tests/1.2.3" 10 | useTestConfig({ UserAgent }) 11 | 12 | const url = "https://example.com"; 13 | const fetchStub = stub( 14 | _internals, 15 | "fetch", 16 | () => Promise.resolve(new Response("")), 17 | ); 18 | 19 | try { 20 | await useFetch(url, {}); 21 | } finally { 22 | fetchStub.restore(); 23 | } 24 | 25 | assertSpyCallArgs(fetchStub, 0, [url, { 26 | headers: {"User-Agent": UserAgent} 27 | }]); 28 | } 29 | }); 30 | -------------------------------------------------------------------------------- /examples/whisper.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S pkgx deno run --allow-read --allow-write --allow-run --allow-env --unstable --allow-net 2 | 3 | /* 4 | sh <(curl pkgx.sh) deno https://raw.githubusercontent.com/pkgxdev/lib/main/examples/whisper.ts 5 | */ 6 | 7 | import { porcelain } from "https://raw.github.com/pkgxdev/lib/v0/mod.ts" 8 | import { green } from "https://deno.land/std/fmt/colors.ts" 9 | const { run } = porcelain 10 | 11 | const url = 'https://github.com/ggerganov/whisper.cpp/raw/master/samples/jfk.wav' 12 | const rsp = await fetch(url) 13 | await Deno.writeFile("jfk.wav", rsp.body!) 
14 | 15 | await run("whisper.cpp jfk.wav --output-json") 16 | 17 | const txt = Deno.readTextFileSync("jfk.wav.json") 18 | const json = JSON.parse(txt) 19 | 20 | console.log() 21 | console.log(green(json.transcription[0].text.trim())) 22 | -------------------------------------------------------------------------------- /src/hooks/useOffLicense.ts: -------------------------------------------------------------------------------- 1 | import { Stowage } from "../types.ts" 2 | import host from "../utils/host.ts" 3 | import useConfig from "./useConfig.ts"; 4 | 5 | type Type = 's3' 6 | 7 | export default function useOffLicense(_type: Type) { 8 | return { url, key } 9 | } 10 | 11 | function key(stowage: Stowage) { 12 | const rv = [stowage.pkg.project] 13 | if (stowage.type == 'bottle') { 14 | const { platform, arch } = stowage.host ?? host() 15 | rv.push(`${platform}/${arch}`) 16 | } 17 | let fn = `v${stowage.pkg.version}` 18 | if (stowage.type == 'bottle') { 19 | fn += `.tar.${stowage.compression}` 20 | } else { 21 | fn += stowage.extname 22 | } 23 | rv.push(fn) 24 | return rv.join("/") 25 | } 26 | 27 | function url(stowage: Stowage) { 28 | return new URL(`${useConfig().dist}/${key(stowage)}`) 29 | } 30 | -------------------------------------------------------------------------------- /src/plumbing/which.test.ts: -------------------------------------------------------------------------------- 1 | import { assert, assertEquals } from "@std/assert" 2 | import { isArray } from "is-what" 3 | import which from "./which.ts" 4 | import { useTestConfig } from "../hooks/useTestConfig.ts"; 5 | 6 | Deno.test("which('python')", async () => { 7 | useTestConfig() 8 | const foo = await which('python') 9 | assert(!isArray(foo)) 10 | assert(foo) 11 | }) 12 | 13 | Deno.test("which('chalk')", async () => { 14 | useTestConfig() 15 | const foo = await which('chalk') 16 | assert(!isArray(foo)) 17 | assert(foo) 18 | 19 | const bar = await which('chalk', { providers: false }) 20 | 
assertEquals(bar, undefined) 21 | }) 22 | 23 | Deno.test("which('nvim')", async () => { 24 | useTestConfig() 25 | const foo = await which('chalk', { all: true }) 26 | assert(isArray(foo)) 27 | assert(foo.length) 28 | }) 29 | -------------------------------------------------------------------------------- /src/deps.ts: -------------------------------------------------------------------------------- 1 | import * as is_what from "https://deno.land/x/is_what@v4.1.15/src/index.ts" 2 | export { is_what } 3 | 4 | import { type PlainObject } from "https://deno.land/x/is_what@v4.1.15/src/index.ts" 5 | export type { PlainObject } 6 | 7 | import * as outdent from "https://deno.land/x/outdent@v0.8.0/mod.ts" 8 | export { outdent } 9 | 10 | // importing super specifically to reduce final npm bundle size 11 | import * as crypto from "jsr:@std/crypto@1" 12 | import { moveSync } from "jsr:@std/fs@1" 13 | import { writeAll } from "jsr:@std/io@^0.225.0" 14 | import { parse as parseYaml, parseAll as parseYamlALL } from "jsr:@std/yaml@1" 15 | import { SEPARATOR as SEP, fromFileUrl } from "jsr:@std/path@1" 16 | 17 | const streams = { writeAll } 18 | const fs = { moveSync } 19 | const deno = { crypto, fs, streams, parseYaml, parseYamlALL, SEP, fromFileUrl } 20 | 21 | export { deno } 22 | -------------------------------------------------------------------------------- /src/utils/error.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals, assertRejects, assertThrows } from "@std/assert" 2 | import { PkgxError, panic } from "../utils/error.ts" 3 | 4 | Deno.test("errors", async test => { 5 | 6 | await test.step("panic", () => { 7 | assertThrows(() => panic("test msg"), "test msg") 8 | }) 9 | 10 | await test.step("swallow", async () => { 11 | await new Promise((_, reject) => reject(new BarError())).swallow(BarError) 12 | await new Promise((_, reject) => reject(new BazError())).swallow(BarError) 13 | assertRejects(() => new 
Promise((_, reject) => reject(new FooError())).swallow(BarError)) 14 | }) 15 | 16 | await test.step("new PkgxError()", () => { 17 | const e = new PkgxError("test msg", {ctx: 1}) 18 | assertEquals(e.message, "test msg") 19 | assertEquals(e.ctx.ctx, 1) 20 | }) 21 | }) 22 | 23 | class FooError extends Error 24 | {} 25 | 26 | class BarError extends Error 27 | {} 28 | 29 | class BazError extends BarError 30 | {} 31 | -------------------------------------------------------------------------------- /src/hooks/useShellEnv.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals } from "@std/assert" 2 | import { useTestConfig } from "./useTestConfig.ts" 3 | import useShellEnv from "./useShellEnv.ts" 4 | import SemVer from "../utils/semver.ts" 5 | 6 | Deno.test("useShellEnv", async () => { 7 | const { map, flatten } = useShellEnv() 8 | const { prefix } = useTestConfig() 9 | 10 | const installations = [{ 11 | pkg: { project: 'python.org', version: new SemVer('3.9.1') }, 12 | path: prefix.join("python.org/v3.9.1") 13 | }, { 14 | pkg: { project: 'npmjs.com', version: new SemVer('3.9.1') }, 15 | path: prefix.join("npmjs.com/v3.9.1") 16 | }] 17 | 18 | installations[0].path.join("bin").mkdir('p') 19 | installations[1].path.join("bin").mkdir('p') 20 | 21 | const env = flatten(await map({ installations })) 22 | 23 | const SEP = Deno.build.os == 'windows' ? 
';' : ':' 24 | 25 | assertEquals(env.PATH, `${installations[0].path.join("bin")}${SEP}${installations[1].path.join("bin")}`) 26 | }) 27 | -------------------------------------------------------------------------------- /src/utils/pkg.ts: -------------------------------------------------------------------------------- 1 | import { Package, PackageRequirement } from "../types.ts" 2 | import * as semver from "./semver.ts" 3 | 4 | /// allows inputs `nodejs.org@16` when `semver.parse` would reject 5 | export function parse(input: string): PackageRequirement { 6 | input = input.trim() 7 | 8 | const match = input.match(/^(.+?)([\^=~<>@].+)?$/) 9 | if (!match) throw new Error(`invalid pkgspec: ${input}`) 10 | if (!match[2]) match[2] = "*" 11 | 12 | const project = match[1] 13 | const constraint = new semver.Range(match[2]) 14 | return { project, constraint } 15 | } 16 | 17 | export function compare(a: Package, b: Package): number { 18 | return a.project === b.project 19 | ? a.version.compare(b.version) 20 | : a.project.localeCompare(b.project) 21 | } 22 | 23 | export function str(pkg: Package | PackageRequirement): string { 24 | if (!("constraint" in pkg)) { 25 | return `${pkg.project}=${pkg.version}` 26 | } else if (pkg.constraint.set === "*") { 27 | return pkg.project 28 | } else { 29 | return `${pkg.project}${pkg.constraint}` 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import host, { SupportedPlatform, SupportedArchitecture } from "./utils/host.ts" 2 | import SemVer, { Range } from "./utils/semver.ts" 3 | import Path from "./utils/Path.ts" 4 | 5 | export interface Package { 6 | project: string 7 | version: SemVer 8 | } 9 | 10 | export interface PackageRequirement { 11 | project: string 12 | constraint: Range 13 | } 14 | 15 | export type PackageSpecification = Package | PackageRequirement 16 | 17 | export interface 
Installation { 18 | path: Path 19 | pkg: Package 20 | } 21 | 22 | /// remotely available package content (bottles or source tarball) 23 | export type Stowage = { 24 | type: 'src' 25 | pkg: Package 26 | extname: string 27 | } | { 28 | type: 'bottle' 29 | pkg: Package 30 | compression: 'xz' | 'gz' 31 | host?: { platform: SupportedPlatform, arch: SupportedArchitecture } 32 | } 33 | 34 | /// once downloaded, `Stowage` becomes `Stowed` 35 | export type Stowed = Stowage & { path: Path } 36 | 37 | export function StowageNativeBottle(opts: { pkg: Package, compression: 'xz' | 'gz' }): Stowage { 38 | return { ...opts, host: host(), type: 'bottle' } 39 | } 40 | -------------------------------------------------------------------------------- /src/hooks/useSyncCache.test.ts: -------------------------------------------------------------------------------- 1 | import specimen, { provides, dependencies, available, runtime_env, completion, companions } from "./useSyncCache.ts" 2 | import { useTestConfig } from "./useTestConfig.ts" 3 | import { assert, assertEquals } from "@std/assert" 4 | import { _internals } from "./useSync.ts" 5 | import usePantry from "./usePantry.ts" 6 | 7 | // NOTE actually syncs from github 8 | // TODO unit tests should not do actual network calls, instead make an implementation suite 9 | 10 | Deno.test({ 11 | name: "useSyncCache", 12 | ignore: Deno.build.os == 'windows', 13 | sanitizeResources: false, 14 | async fn() { 15 | useTestConfig() 16 | await _internals.sync(usePantry().prefix.parent()) 17 | await specimen() 18 | 19 | //TODO test better 20 | assert(available()) 21 | assertEquals((await provides('node'))?.[0], 'nodejs.org') 22 | // assertEquals((await dependencies('nodejs.org'))?.length, 3) 23 | assert(new Set(await completion('nod')).has("node")) 24 | assertEquals((await companions("nodejs.org"))?.[0]?.project, "npmjs.com") 25 | assert((await runtime_env("numpy.org"))?.["PYTHONPATH"]) 26 | } 27 | }) 28 | 
-------------------------------------------------------------------------------- /src/hooks/useCache.test.ts: -------------------------------------------------------------------------------- 1 | import { Stowage, StowageNativeBottle } from "../types.ts" 2 | import { useTestConfig } from "./useTestConfig.ts" 3 | import { assertEquals } from "@std/assert" 4 | import SemVer from "../utils/semver.ts" 5 | import useCache from "./useCache.ts" 6 | import host from "../utils/host.ts" 7 | 8 | Deno.test("useCache", () => { 9 | const { cache } = useTestConfig() 10 | const hw = (({ platform, arch }) => `${platform}+${arch}`)(host()) 11 | 12 | const stowage = StowageNativeBottle({ 13 | pkg: { project: "foo/bar", version: new SemVer("1.0.0") }, 14 | compression: "xz" 15 | }); 16 | assertEquals(useCache().path(stowage), cache.join(`foo∕bar-1.0.0+${hw}.tar.xz`)) 17 | 18 | const stowage2: Stowage = { 19 | type: 'bottle', 20 | pkg: stowage.pkg, 21 | host: { platform: "linux", arch: "aarch64" }, 22 | compression: 'xz' 23 | } 24 | assertEquals(useCache().path(stowage2), cache.join("foo∕bar-1.0.0+linux+aarch64.tar.xz")) 25 | 26 | const stowage3: Stowage = { 27 | pkg: stowage.pkg, 28 | type: "src", 29 | extname: ".tgz" 30 | } 31 | assertEquals(useCache().path(stowage3), cache.join("foo∕bar-1.0.0.tgz")) 32 | }) 33 | -------------------------------------------------------------------------------- /src/utils/read-lines.ts: -------------------------------------------------------------------------------- 1 | export default async function* readLines(file: Deno.FsFile): AsyncGenerator { 2 | const decoder = new TextDecoder(); 3 | const buffer = new Uint8Array(1024); // Buffer for reading chunks 4 | let leftover = ''; // Leftover string after splitting by newline 5 | 6 | while (true) { 7 | const bytesRead = await file.read(buffer); // Read a chunk of data 8 | 9 | if (bytesRead === null) break; // Exit the loop if the end of file is reached 10 | 11 | // Decode the chunk and add any leftover from 
the previous iteration 12 | const chunk = leftover + decoder.decode(buffer.subarray(0, bytesRead)); 13 | 14 | // Split the chunk by newline 15 | const lines = chunk.split('\n'); 16 | 17 | // Yield all lines except the last, which may be incomplete 18 | for (let i = 0; i < lines.length - 1; i++) { 19 | yield lines[i]; 20 | } 21 | 22 | // Keep the last part as leftover (which could be incomplete) 23 | leftover = lines[lines.length - 1]; 24 | } 25 | 26 | // If there's any remaining data in leftover, yield it as the last line 27 | if (leftover) { 28 | yield leftover; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/hooks/useCellar.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals, assertRejects } from "@std/assert" 2 | import SemVer, * as semver from "../utils/semver.ts" 3 | import { useTestConfig } from "./useTestConfig.ts" 4 | import useCellar from "./useCellar.ts" 5 | 6 | Deno.test("useCellar.resolve()", async () => { 7 | useTestConfig() 8 | 9 | const pkg = { project: "python.org", version: new SemVer("3.11.3") } 10 | const path = useCellar().shelf(pkg.project).join(`v${pkg.version}`).mkdir('p') 11 | path.join("cant-be-empty").touch() 12 | const installation = { pkg, path } 13 | 14 | await useCellar().resolve(installation) 15 | await useCellar().resolve(installation.pkg) 16 | await useCellar().resolve({ project: "python.org", constraint: new semver.Range("^3") }) 17 | await useCellar().resolve(installation.path) 18 | 19 | await assertRejects(() => useCellar().resolve({ project: "python.org", constraint: new semver.Range("@300")})) 20 | }) 21 | 22 | Deno.test("useCellar.has()", async () => { 23 | useTestConfig() 24 | 25 | const pkg = { project: "beyondgrep.com", version: new SemVer("3.6.0") } 26 | 27 | assertEquals(await useCellar().has(pkg), undefined) 28 | 29 | const path = useCellar().shelf(pkg.project).join(`v${pkg.version}`).mkdir('p') 30 | 
path.join("cant-be-empty").touch() 31 | const installation = { pkg, path } 32 | 33 | assertEquals(await useCellar().has(pkg), installation) 34 | }) 35 | -------------------------------------------------------------------------------- /src/hooks/useDownload.test.ts: -------------------------------------------------------------------------------- 1 | import { useTestConfig } from "./useTestConfig.ts" 2 | import { assert } from "@std/assert" 3 | import useDownload from "./useDownload.ts" 4 | 5 | //TODO don’t actually do http obv. 6 | 7 | Deno.test("etag-mtime-check", async runner => { 8 | useTestConfig({ PKGX_DIR: Deno.makeTempDirSync() }) 9 | 10 | const src = new URL("https://dist.pkgx.dev/ijg.org/versions.txt") 11 | const { download, cache } = useDownload() 12 | 13 | await runner.step("download", async () => { 14 | await download({src}) 15 | 16 | const mtimePath = cache({ for: src }).join("mtime") 17 | const etagPath = cache({ for: src }).join("etag") 18 | 19 | const mtime = await mtimePath.read() 20 | const etag = await etagPath.read() 21 | 22 | const rsp = await fetch(src, {}) 23 | const mtimeA = rsp.headers.get("Last-Modified") 24 | const etagA = rsp.headers.get("etag") 25 | 26 | assert(mtimeA === mtime) 27 | assert(etagA === etag) 28 | 29 | await rsp.body?.cancel() 30 | }) 31 | 32 | await runner.step("second download doesn’t http", async () => { 33 | let n = 0 34 | await download({src}, blob => { n += blob.length; return Promise.resolve() }) // for coverage 35 | assert(n > 0) 36 | }) 37 | 38 | await runner.step("second download doesn’t http and is fine if we do nothing", async () => { 39 | const dst = await download({src}) 40 | assert(dst.isFile()) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /vendor/sqlite3@0.10.0/src/util.ts: -------------------------------------------------------------------------------- 1 | import { SQLITE3_DONE, SQLITE3_MISUSE, SQLITE3_OK } from "./constants.ts"; 2 | import ffi from 
"./ffi.ts"; 3 | 4 | export const encoder = new TextEncoder(); 5 | 6 | export function toCString(str: string): Uint8Array { 7 | return encoder.encode(str + "\0"); 8 | } 9 | 10 | export function isObject(value: unknown): boolean { 11 | return typeof value === "object" && value !== null; 12 | } 13 | 14 | export class SqliteError extends Error { 15 | override name = "SqliteError"; 16 | 17 | constructor( 18 | public code: number = 1, 19 | message: string = "Unknown Error", 20 | ) { 21 | super(`${code}: ${message}`); 22 | } 23 | } 24 | 25 | export function unwrap(code: number, db?: Deno.PointerValue): void { 26 | const { 27 | sqlite3_errmsg, 28 | sqlite3_errstr, 29 | } = ffi(); 30 | 31 | if (code === SQLITE3_OK || code === SQLITE3_DONE) return; 32 | if (code === SQLITE3_MISUSE) { 33 | throw new SqliteError(code, "SQLite3 API misuse"); 34 | } else if (db !== undefined) { 35 | const errmsg = sqlite3_errmsg(db); 36 | if (errmsg === null) throw new SqliteError(code); 37 | throw new Error(Deno.UnsafePointerView.getCString(errmsg)); 38 | } else { 39 | throw new SqliteError( 40 | code, 41 | Deno.UnsafePointerView.getCString(sqlite3_errstr(code)!), 42 | ); 43 | } 44 | } 45 | 46 | export const buf = Deno.UnsafePointerView.getArrayBuffer; 47 | 48 | export const readCstr = Deno.UnsafePointerView.getCString; 49 | -------------------------------------------------------------------------------- /src/plumbing/link.test.ts: -------------------------------------------------------------------------------- 1 | import { useTestConfig } from "../hooks/useTestConfig.ts" 2 | import { assert } from "@std/assert" 3 | import SemVer from "../utils/semver.ts" 4 | import link from "./link.ts"; 5 | 6 | Deno.test({ 7 | name: "plumbing.link", 8 | ignore: Deno.build.os == 'windows', 9 | async fn(runner) { 10 | const pkg = {project: 'python.org', version: new SemVer('3.9.0')} 11 | 12 | await runner.step("link()", async () => { 13 | const { prefix } = useTestConfig() 14 | const path = 
prefix.join("python.org/v3.9.0").mkdir('p') 15 | const installation = { pkg, path } 16 | 17 | path.join("not-empty").touch() 18 | 19 | await link(installation) 20 | await link(installation) // test that calling twice serially works 21 | 22 | /// test symlinks work 23 | assert(installation.path.parent().join("v*").isDirectory()) 24 | assert(installation.path.parent().join(`v${pkg.version.major}`).isDirectory()) 25 | }) 26 | 27 | await runner.step("link() ×2 at once", async () => { 28 | const { prefix } = useTestConfig() 29 | const path = prefix.join("python.org/v3.9.0").mkdir('p') 30 | const installation = { pkg, path } 31 | 32 | path.join("not-empty").touch() 33 | 34 | const p1 = link(installation) 35 | const p2 = link(installation) 36 | 37 | await Promise.all([p1, p2]) 38 | 39 | /// test symlinks work 40 | assert(installation.path.parent().join("v*").isDirectory()) 41 | assert(installation.path.parent().join(`v${pkg.version.major}`).isDirectory()) 42 | }) 43 | } 44 | }) 45 | -------------------------------------------------------------------------------- /src/hooks/useTestConfig.ts: -------------------------------------------------------------------------------- 1 | import useConfig, { ConfigDefault } from "./useConfig.ts" 2 | import { fromFileUrl } from "jsr:@std/path@1" 3 | import Path from "../utils/Path.ts" 4 | 5 | export function useBaseTestConfig(env?: Record) { 6 | env ??= {} 7 | 8 | /// always prefer a new prefix 9 | env.PKGX_HOME ??= Path.mktemp().string 10 | 11 | const config = ConfigDefault(env) 12 | if ('UserAgent' in env) { 13 | config.UserAgent = env['UserAgent'] 14 | } 15 | 16 | return useConfig(config) 17 | } 18 | 19 | import usePantry from "./usePantry.ts" 20 | 21 | export function useTestConfig(env?: Record) { 22 | const conf = useBaseTestConfig(env) 23 | copyDirectory(srcroot.join("fixtures/projects").string, usePantry().prefix.mkdir('p').string) 24 | return conf 25 | } 26 | 27 | export const srcroot = (() => { 28 | // because when running via 
dnt the path of this file is different 29 | if (Path.cwd().parent().parent().join("fixtures").isDirectory()) { 30 | return Path.cwd().parent().parent() 31 | } else { 32 | return new Path(fromFileUrl(import.meta.url)).parent().parent().parent() 33 | } 34 | })() 35 | 36 | import { walkSync } from 'jsr:@std/fs@1' 37 | 38 | // deno/dnt has a broken shim for this function 39 | function copyDirectory(src: string, dest: string) { 40 | for (const entry of walkSync(src)) { 41 | const destPath = entry.path.replace(src, dest); 42 | 43 | if (entry.isDirectory) { 44 | Deno.mkdirSync(destPath, { recursive: true }); 45 | } else { 46 | Deno.copyFileSync(entry.path, destPath); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/hooks/useInventory.ts: -------------------------------------------------------------------------------- 1 | import { Package, PackageRequirement } from "../types.ts" 2 | import { DownloadError } from "./useDownload.ts" 3 | import SemVer from "../utils/semver.ts" 4 | import useFetch from "./useFetch.ts" 5 | import host from "../utils/host.ts" 6 | import "../utils/misc.ts" 7 | import useConfig from "./useConfig.ts"; 8 | 9 | export interface Inventory { 10 | [project: string]: { 11 | [platform: string]: { 12 | [arch: string]: string[] 13 | } 14 | } 15 | } 16 | 17 | const select = async (rq: PackageRequirement | Package) => { 18 | const versions = await _internals.get(rq) 19 | 20 | if ("constraint" in rq) { 21 | return rq.constraint.max(versions) 22 | } else if (versions.find(x => x.eq(rq.version))) { 23 | return rq.version 24 | } 25 | } 26 | 27 | const get = async (rq: PackageRequirement | Package) => { 28 | const { platform, arch } = host() 29 | const url = new URL(`${useConfig().dist}/${rq.project}/${platform}/${arch}/versions.txt`) 30 | const rsp = await useFetch(url) 31 | 32 | if (!rsp.ok) { 33 | throw new DownloadError(rsp.status, {src: url}) 34 | } 35 | 36 | const releases = await rsp.text() 37 | 
let versions = releases.trim().split("\n").map(x => new SemVer(x)) 38 | 39 | if (versions.length < 1) throw new Error(`No versions for ${rq.project}`) 40 | 41 | if (rq.project == 'openssl.org') { 42 | // workaround our previous sins 43 | const v = new SemVer("1.1.118") 44 | versions = versions.filter(x => x.neq(v)) 45 | } 46 | 47 | return versions 48 | } 49 | 50 | export default function useInventory() { 51 | return { select, get } 52 | } 53 | 54 | export const _internals = { get } 55 | -------------------------------------------------------------------------------- /src/porcelain/run.test.ts: -------------------------------------------------------------------------------- 1 | import { useTestConfig } from "../hooks/useTestConfig.ts" 2 | import { assertEquals, assertRejects } from "@std/assert" 3 | import undent from "outdent" 4 | import run from "./run.ts" 5 | 6 | Deno.test("porcelain.run", async runner => { 7 | 8 | const { prefix } = useTestConfig() 9 | 10 | const foo = prefix.join("foo.com/v5.43.0/bin").mkdir("p") 11 | 12 | if (Deno.build.os != 'windows') { 13 | foo.join("foo").write({ text: undent` 14 | #!/bin/sh 15 | if [ "$1" = "--fail" ]; then exit 1; fi 16 | echo "abcdef--" 17 | echo "ghijkl--" 1>&2 18 | `}).chmod(0o755) 19 | } else { 20 | foo.join("foo.bat").write({text: undent` 21 | @echo off 22 | IF "%~1"=="--fail" ( exit /b 1 ) 23 | echo abcdef-- 24 | echo ghijkl-- 1>&2 25 | `}) 26 | } 27 | 28 | prefix.join("bar.com/v1.2.3").mkdir('p').join("not-empty").touch() 29 | 30 | await runner.step("std", async () => { 31 | await run("foo --args") 32 | await run("foo") // tests no spaces branch 33 | 34 | await assertRejects(() => run([])) 35 | }) 36 | 37 | await runner.step("std(out|err)", async () => { 38 | const { stdout } = await run(["foo", "--args"], {stdout: true}) 39 | const nl = Deno.build.os === "windows" ? 
"\r\n" : "\n"; 40 | assertEquals(stdout, `abcdef--${nl}`) 41 | 42 | const { stderr } = await run(["foo", "--args"], {stderr: true}) 43 | const expected = Deno.build.os === "windows" ? 'ghijkl-- \r\n' : 'ghijkl--\n' 44 | assertEquals(stderr, expected) 45 | }) 46 | 47 | await runner.step("cmd fails", async () => { 48 | await assertRejects(() => run(["foo", "--fail"])) 49 | }) 50 | }) 51 | -------------------------------------------------------------------------------- /src/utils/host.ts: -------------------------------------------------------------------------------- 1 | import process from "node:process" 2 | 3 | // when we support more variants of these that require specification 4 | // we will tuple a version in with each eg. 'darwin' | ['windows', 10 | 11 | '*'] 5 | export const SupportedPlatforms = ["darwin" , "linux" , "windows"] as const 6 | export type SupportedPlatform = typeof SupportedPlatforms[number] 7 | 8 | export const SupportedArchitectures = ["x86-64", "aarch64"] as const 9 | export type SupportedArchitecture = typeof SupportedArchitectures[number] 10 | 11 | interface HostReturnValue { 12 | platform: SupportedPlatform 13 | arch: SupportedArchitecture 14 | target: string 15 | build_ids: [SupportedPlatform, SupportedArchitecture] 16 | } 17 | 18 | export default function host(): HostReturnValue { 19 | const platform = (() => { 20 | const platform = _internals.platform() 21 | switch (platform) { 22 | case "darwin": 23 | case "linux": 24 | case "windows": 25 | return platform 26 | default: 27 | console.warn(`operating incognito as linux (${platform})`) 28 | return 'linux' 29 | }})() 30 | 31 | const arch = (() => { 32 | const arch = _internals.arch() 33 | switch (arch) { 34 | case "arm64": 35 | return "aarch64" 36 | case "x64": 37 | return "x86-64" 38 | default: 39 | throw new Error(`unsupported-arch: ${arch}`) 40 | }})() 41 | 42 | const { target } = Deno.build 43 | 44 | return { 45 | platform, 46 | arch, 47 | target, 48 | build_ids: [platform, arch] 49 | } 
50 | } 51 | 52 | const _internals = { 53 | arch: () => process.arch, 54 | platform: () => Deno.build.os 55 | } 56 | 57 | export { _internals } 58 | -------------------------------------------------------------------------------- /src/hooks/useSync.test.ts: -------------------------------------------------------------------------------- 1 | import specimen, { _internals } from "./useSync.ts" 2 | import { useTestConfig } from "./useTestConfig.ts" 3 | import * as mock from "@std/testing/mock" 4 | import { assert } from "@std/assert" 5 | import usePantry from "./usePantry.ts" 6 | 7 | // NOTE actually syncs from github 8 | // TODO unit tests should not do actual network calls, instead make an implementation suite 9 | 10 | Deno.test("useSync", async runner => { 11 | const stub = mock.stub(_internals, "cache", async () => {}) 12 | 13 | try { 14 | await runner.step("w/o git", async () => { 15 | const conf = useTestConfig({}) 16 | usePantry().prefix.rm({ recursive: true }) // we need to delete the fixtured pantry 17 | assert(conf.git === undefined) 18 | await test() 19 | }) 20 | 21 | await runner.step({ 22 | name: "w/git", 23 | ignore: Deno.build.os == 'windows' && !Deno.env.get("CI"), 24 | async fn() { 25 | const conf = useTestConfig({ PATH: "/usr/bin" }) 26 | usePantry().prefix.rm({ recursive: true }) // we need to delete the fixtured pantry 27 | assert(conf.git !== undefined) 28 | await test() 29 | 30 | // test the “already cloned” code-path 31 | await specimen() 32 | } 33 | }) 34 | 35 | async function test() { 36 | let errord = false 37 | try { 38 | await usePantry().project("gnu.org/gcc").available() 39 | } catch { 40 | errord = true 41 | } 42 | assert(errord, `should be no pantry but there is! 
${usePantry().prefix}`) 43 | 44 | await specimen() 45 | 46 | assert(await usePantry().project("gnu.org/gcc").available()) 47 | } 48 | 49 | } finally { 50 | stub.restore() 51 | } 52 | 53 | }) 54 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - deno.json 7 | - deno.lock 8 | - '**/*.ts' 9 | - fixtures 10 | - .github/workflows/ci.yml 11 | push: 12 | branches: main 13 | paths: 14 | - deno.json 15 | - deno.lock 16 | - '**/*.ts' 17 | - fixtures 18 | - .github/workflows/ci.yml 19 | workflow_dispatch: 20 | 21 | concurrency: 22 | group: ${{ github.ref }} 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | tests: 27 | runs-on: ${{ matrix.os }} 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | os: 32 | - macos-latest 33 | - ubuntu-latest 34 | 35 | steps: 36 | - uses: actions/checkout@v4 37 | - uses: denoland/setup-deno@v1 38 | - run: deno cache mod.ts 39 | 40 | - run: deno task test 41 | --coverage=cov_profile 42 | --no-check # ⬆signal∶noise & ∵ we have `jobs.typecheck` 43 | 44 | - run: deno coverage cov_profile --lcov --output=cov_profile.lcov 45 | 46 | - uses: coverallsapp/github-action@v2 47 | with: 48 | path-to-lcov: cov_profile.lcov 49 | parallel: true 50 | flag-name: ${{ matrix.os }} 51 | 52 | upload-coverage: 53 | needs: tests 54 | runs-on: ubuntu-latest 55 | steps: 56 | - uses: coverallsapp/github-action@v2 57 | with: 58 | parallel-finished: true 59 | 60 | lint: 61 | runs-on: ubuntu-latest 62 | steps: 63 | - uses: actions/checkout@v4 64 | - uses: denoland/setup-deno@v1 65 | - run: deno lint 66 | 67 | typecheck: 68 | runs-on: ubuntu-latest 69 | steps: 70 | - uses: actions/checkout@v4 71 | - uses: denoland/setup-deno@v1 72 | - run: deno task typecheck 73 | -------------------------------------------------------------------------------- /src/hooks/useMoustaches.ts: 
-------------------------------------------------------------------------------- 1 | import { Package, Installation } from "../types.ts" 2 | import SemVer from "../utils/semver.ts" 3 | import useConfig from "./useConfig.ts" 4 | import useCellar from "./useCellar.ts" 5 | 6 | function tokenizePackage(pkg: Package) { 7 | return [{ from: "prefix", to: useCellar().keg(pkg).string }] 8 | } 9 | 10 | function tokenizeVersion(version: SemVer, prefix = 'version') { 11 | const rv = [ 12 | { from: prefix, to: `${version}` }, 13 | { from: `${prefix}.major`, to: `${version.major}` }, 14 | { from: `${prefix}.minor`, to: `${version.minor}` }, 15 | { from: `${prefix}.patch`, to: `${version.patch}` }, 16 | { from: `${prefix}.marketing`, to: `${version.major}.${version.minor}` }, 17 | { from: `${prefix}.build`, to: version.build.join('+') }, 18 | { from: `${prefix}.raw`, to: version.raw }, 19 | ] 20 | if ('tag' in version) { 21 | rv.push({from: `${prefix}.tag`, to: (version as unknown as {tag: string}).tag}) 22 | } 23 | return rv 24 | } 25 | 26 | function apply(input: string, map: { from: string, to: string }[]) { 27 | return map.reduce((acc, {from, to}) => 28 | acc.replace(new RegExp(`(^\\$)?{{\\s*${from}\\s*}}`, "g"), to), 29 | input) 30 | } 31 | 32 | export default function() { 33 | const config = useConfig() 34 | const base = { 35 | apply, 36 | tokenize: { 37 | version: tokenizeVersion, 38 | pkg: tokenizePackage 39 | } 40 | } 41 | 42 | const deps = (deps: Installation[]) => { 43 | const map: {from: string, to: string}[] = [] 44 | for (const dep of deps ?? 
[]) { 45 | map.push({ from: `deps.${dep.pkg.project}.prefix`, to: dep.path.string }) 46 | map.push(...base.tokenize.version(dep.pkg.version, `deps.${dep.pkg.project}.version`)) 47 | } 48 | return map 49 | } 50 | 51 | const pkgx = () => [{ from: "pkgx.prefix", to: config.prefix.string }] 52 | 53 | const all = (pkg: Package, deps_: Installation[]) => [ 54 | ...deps(deps_), 55 | ...tokenizePackage(pkg), 56 | ...pkgx(), 57 | ...base.tokenize.version(pkg.version), 58 | ] 59 | 60 | return { 61 | apply: base.apply, 62 | tokenize: { 63 | ...base.tokenize, 64 | deps, pkgx, all 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/utils/host.test.ts: -------------------------------------------------------------------------------- 1 | import { assert, assertEquals, assertThrows, fail } from "@std/assert" 2 | import host, { _internals, SupportedPlatform } from "./host.ts" 3 | import { stub } from "@std/testing/mock" 4 | 5 | Deno.test({ 6 | name:"host()", 7 | ignore: Deno.build.os == 'windows', 8 | async fn() 9 | { 10 | const uname = [await run("uname"), await run("uname -m")] 11 | 12 | const { platform, arch } = host() 13 | switch (uname[0]) { 14 | case "Darwin": 15 | assertEquals(platform, "darwin") 16 | break 17 | case "Linux": 18 | assertEquals(platform, "linux") 19 | break 20 | default: 21 | fail() 22 | } 23 | 24 | switch (uname[1]) { 25 | case "aarch64": 26 | case "arm64": 27 | assertEquals(arch, "aarch64") 28 | break 29 | case "x86_64": 30 | assertEquals(arch, "x86-64") 31 | break 32 | default: 33 | fail() 34 | } 35 | 36 | async function run(cmd: string) { 37 | const parts = cmd.split(" ") 38 | const out = await new Deno.Command(parts[0], { 39 | args: parts.slice(1), 40 | stdout: "piped", 41 | }).output() 42 | return new TextDecoder().decode(out.stdout).trim() 43 | } 44 | }}) 45 | 46 | Deno.test({ 47 | name: "host()", 48 | ignore: Deno.build.os != 'windows', 49 | fn() { 50 | assertEquals(host().platform, 
"windows") 51 | assertEquals(host().arch, "x86-64") 52 | } 53 | }) 54 | 55 | Deno.test("host().windows.arm64", () => { 56 | const s1 = stub(_internals, "platform", () => "windows" as SupportedPlatform) 57 | const s2 = stub(_internals, "arch", () => "arm64" as "arm64" | "x64") 58 | try { 59 | const { platform, arch } = host() 60 | assertEquals(platform, "windows") 61 | assertEquals(arch, "aarch64") 62 | } finally { 63 | s1.restore() 64 | s2.restore() 65 | } 66 | }) 67 | 68 | Deno.test("host().aix.x", () => { 69 | const s1 = stub(_internals, "platform", () => "aix" as SupportedPlatform) 70 | try { 71 | const { platform } = host() 72 | assertEquals(platform, "linux") 73 | } finally { 74 | s1.restore() 75 | } 76 | }) 77 | 78 | Deno.test("host().x.foo", () => { 79 | const s1 = stub(_internals, "arch", () => "foo" as "arm64" | "x64") 80 | try { 81 | assertThrows(host) 82 | } finally { 83 | s1.restore() 84 | } 85 | }) 86 | -------------------------------------------------------------------------------- /src/hooks/useInventory.test.ts: -------------------------------------------------------------------------------- 1 | // deno-lint-ignore-file require-await no-explicit-any 2 | import { assertEquals, assertRejects } from "@std/assert" 3 | import SemVer, * as semver from "../utils/semver.ts" 4 | import * as mock from "@std/testing/mock" 5 | import { _internals } from "./useFetch.ts" 6 | import specimen from "./useInventory.ts" 7 | 8 | Deno.test("useInventory", async runner => { 9 | await runner.step("select()", async () => { 10 | const stub = mock.stub(_internals, "fetch", async () => { 11 | return { 12 | ok: true, 13 | status: 200, 14 | async text() { 15 | return "1.2.3\n1.2.4" 16 | } 17 | } as any 18 | }) 19 | 20 | try { 21 | assertEquals( 22 | (await specimen().select({project: "foo", version: new SemVer("1.2.3")}))?.toString(), 23 | "1.2.3" 24 | ) 25 | 26 | assertEquals( 27 | (await specimen().select({project: "foo", constraint: new semver.Range("=1.2.3")}))?.toString(), 
28 | "1.2.3" 29 | ) 30 | } finally { 31 | stub.restore() 32 | } 33 | }) 34 | 35 | await runner.step("fail HTTP", async () => { 36 | const stub = mock.stub(_internals, "fetch", async () => { 37 | return { 38 | ok: false, 39 | status: 404 40 | } as any 41 | }) 42 | 43 | try { 44 | assertRejects(() => specimen().select({project: "foo", version: new SemVer("1.2.3")})) 45 | } finally { 46 | stub.restore() 47 | } 48 | }) 49 | 50 | await runner.step("fail no versions", async () => { 51 | const stub = mock.stub(_internals, "fetch", async () => { 52 | return { 53 | ok: true, 54 | status: 200, 55 | async text() { return "" } 56 | } as any 57 | }) 58 | 59 | try { 60 | assertRejects(() => specimen().select({project: "foo", version: new SemVer("1.2.3")})) 61 | } finally { 62 | stub.restore() 63 | } 64 | }) 65 | 66 | await runner.step("openssl hack", async () => { 67 | const stub = mock.stub(_internals, "fetch", async () => { 68 | return { 69 | ok: true, 70 | status: 200, 71 | async text() { return "1.1.118\n1.1.117" } 72 | } as any 73 | }) 74 | 75 | try { 76 | assertEquals( 77 | (await specimen().select({project: "openssl.org", constraint: new semver.Range("^1")}))?.toString(), 78 | "1.1.117") 79 | } finally { 80 | stub.restore() 81 | } 82 | }) 83 | }) 84 | -------------------------------------------------------------------------------- /mod.ts: -------------------------------------------------------------------------------- 1 | import "./src/utils/misc.ts" 2 | import { flatmap, validate } from "./src/utils/misc.ts" 3 | 4 | import host, { SupportedArchitecture, SupportedPlatform } from "./src/utils/host.ts" 5 | import SemVer, * as semver from "./src/utils/semver.ts" 6 | import Path from "./src/utils/Path.ts" 7 | 8 | export * as types from "./src/types.ts" 9 | import * as pkg from "./src/utils/pkg.ts" 10 | 11 | import { panic, PkgxError } from "./src/utils/error.ts" 12 | import useConfig from "./src/hooks/useConfig.ts" 13 | import useOffLicense from 
"./src/hooks/useOffLicense.ts" 14 | import useCache from "./src/hooks/useCache.ts" 15 | import useCellar, { InstallationNotFoundError} from "./src/hooks/useCellar.ts" 16 | import useMoustaches from "./src/hooks/useMoustaches.ts" 17 | import usePantry, { PantryError, PantryParseError, PantryNotFoundError, PackageNotFoundError } from "./src/hooks/usePantry.ts" 18 | import useFetch from "./src/hooks/useFetch.ts" 19 | import useDownload, { DownloadError } from "./src/hooks/useDownload.ts" 20 | import useShellEnv from "./src/hooks/useShellEnv.ts" 21 | import useInventory from "./src/hooks/useInventory.ts" 22 | import hydrate from "./src/plumbing/hydrate.ts" 23 | import which from "./src/plumbing/which.ts" 24 | import link from "./src/plumbing/link.ts" 25 | import install, { ConsoleLogger } from "./src/plumbing/install.ts" 26 | import resolve, { ResolveError } from "./src/plumbing/resolve.ts" 27 | import useSync from "./src/hooks/useSync.ts" 28 | import run, { RunError } from "./src/porcelain/run.ts" 29 | import porcelain_install from "./src/porcelain/install.ts" 30 | 31 | const utils = { 32 | pkg, host, flatmap, validate, panic, ConsoleLogger 33 | } 34 | 35 | const hooks = { 36 | useCache, 37 | useCellar, 38 | useConfig, 39 | useDownload, 40 | useFetch, 41 | useInventory, 42 | useMoustaches, 43 | useOffLicense, 44 | usePantry, 45 | useShellEnv, 46 | useSync, 47 | } 48 | 49 | const plumbing = { 50 | hydrate, 51 | link, 52 | install, 53 | resolve, 54 | which 55 | } 56 | 57 | const porcelain = { 58 | install: porcelain_install, 59 | run 60 | } 61 | 62 | export { 63 | utils, hooks, plumbing, porcelain, 64 | semver, 65 | PkgxError, 66 | RunError, 67 | ResolveError, 68 | PantryError, PantryParseError, PantryNotFoundError, PackageNotFoundError, 69 | InstallationNotFoundError, 70 | DownloadError 71 | } 72 | 73 | /// export types 74 | // we cannot add these to the above objects or they cannot be used as types 75 | export { Path, SemVer } 76 | export * from "./src/types.ts" 77 | 
export type { SupportedArchitecture, SupportedPlatform } 78 | -------------------------------------------------------------------------------- /src/porcelain/install.test.ts: -------------------------------------------------------------------------------- 1 | // deno-lint-ignore-file require-await no-explicit-any 2 | import { useTestConfig } from "../hooks/useTestConfig.ts" 3 | import SemVer, * as semver from "../utils/semver.ts" 4 | import install, { ConsoleLogger, _internals } from "./install.ts" 5 | import { assert, assertEquals } from "@std/assert" 6 | import type { Installation } from "../types.ts" 7 | import usePantry from "../hooks/usePantry.ts" 8 | import useConfig from "../hooks/useConfig.ts" 9 | import { stub } from "@std/testing/mock" 10 | import Path from "../utils/Path.ts"; 11 | 12 | Deno.test("porcelain.install", async () => { 13 | useTestConfig() 14 | usePantry().prefix.rm({ recursive: true }) 15 | 16 | const stub1 = stub(_internals, "hydrate", async () => ({ 17 | pkgs: [ 18 | {project: "foo.com", constraint: new semver.Range("*")}, 19 | {project: "bar.org", constraint: new semver.Range("^2")} 20 | ] 21 | } as any)) 22 | 23 | const stub2 = stub(_internals, "resolve", async () => ({ 24 | pending: [{project: "foo.com", version: new SemVer("1.0.0")}], 25 | installed: [{ pkg: {project: "bar.org", version: new SemVer("2.3.4")}, path: Path.root}], 26 | pkgs: [] 27 | })) 28 | 29 | const stub3 = stub(_internals, "link", async (install) => 30 | assertEquals((install as any).pkg.project, "foo.com") 31 | ) 32 | 33 | const installation: Installation = { 34 | pkg: { 35 | project: "foo.com", 36 | version: new SemVer("1.0.0") 37 | }, 38 | path: useConfig().prefix.join("foo.com", "v1.0.0") 39 | } 40 | 41 | const stub4 = stub(_internals, "install", async ({ project }, logger) => { 42 | assertEquals(project, "foo.com") 43 | 44 | // for coverage 45 | logger!.installing!({ pkg: installation.pkg, progress: 1 }) 46 | logger!.downloading!({pkg: installation.pkg, src: new 
URL("http://example.com"), dst: Path.root, rcvd: 0, total: 100}) 47 | 48 | return installation 49 | }) 50 | 51 | const stub5 = stub(_internals, "useSync", async () => {}) 52 | 53 | try { 54 | const installations = await install("foo.com", ConsoleLogger({prefix: "test"})) 55 | assertEquals(installations.length, 2) 56 | assertEquals(installations[0].pkg.project, "bar.org") 57 | assertEquals(installations[1].pkg.project, "foo.com") 58 | } finally { 59 | stub1.restore() 60 | stub2.restore() 61 | stub3.restore() 62 | stub4.restore() 63 | } 64 | 65 | assertEquals(stub1.calls.length, 1) 66 | assertEquals(stub2.calls.length, 1) 67 | assertEquals(stub3.calls.length, Deno.build.os == 'windows' ? 0 : 1) 68 | assertEquals(stub4.calls.length, 1) 69 | assertEquals(stub5.calls.length, 1) 70 | }) 71 | -------------------------------------------------------------------------------- /vendor/sqlite3@0.10.0/src/constants.ts: -------------------------------------------------------------------------------- 1 | // Result Codes 2 | export const SQLITE3_OK = 0; 3 | export const SQLITE3_ERROR = 1; 4 | export const SQLITE3_INTERNAL = 2; 5 | export const SQLITE3_PERM = 3; 6 | export const SQLITE3_ABORT = 4; 7 | export const SQLITE3_BUSY = 5; 8 | export const SQLITE3_LOCKED = 6; 9 | export const SQLITE3_NOMEM = 7; 10 | export const SQLITE3_READONLY = 8; 11 | export const SQLITE3_INTERRUPT = 9; 12 | export const SQLITE3_IOERR = 10; 13 | export const SQLITE3_CORRUPT = 11; 14 | export const SQLITE3_NOTFOUND = 12; 15 | export const SQLITE3_FULL = 13; 16 | export const SQLITE3_CANTOPEN = 14; 17 | export const SQLITE3_PROTOCOL = 15; 18 | export const SQLITE3_EMPTY = 16; 19 | export const SQLITE3_SCHEMA = 17; 20 | export const SQLITE3_TOOBIG = 18; 21 | export const SQLITE3_CONSTRAINT = 19; 22 | export const SQLITE3_MISMATCH = 20; 23 | export const SQLITE3_MISUSE = 21; 24 | export const SQLITE3_NOLFS = 22; 25 | export const SQLITE3_AUTH = 23; 26 | export const SQLITE3_FORMAT = 24; 27 | export const 
SQLITE3_RANGE = 25; 28 | export const SQLITE3_NOTADB = 26; 29 | export const SQLITE3_NOTICE = 27; 30 | export const SQLITE3_WARNING = 28; 31 | export const SQLITE3_ROW = 100; 32 | export const SQLITE3_DONE = 101; 33 | 34 | // Open Flags 35 | export const SQLITE3_OPEN_READONLY = 0x00000001; 36 | export const SQLITE3_OPEN_READWRITE = 0x00000002; 37 | export const SQLITE3_OPEN_CREATE = 0x00000004; 38 | export const SQLITE3_OPEN_DELETEONCLOSE = 0x00000008; 39 | export const SQLITE3_OPEN_EXCLUSIVE = 0x00000010; 40 | export const SQLITE3_OPEN_AUTOPROXY = 0x00000020; 41 | export const SQLITE3_OPEN_URI = 0x00000040; 42 | export const SQLITE3_OPEN_MEMORY = 0x00000080; 43 | export const SQLITE3_OPEN_MAIN_DB = 0x00000100; 44 | export const SQLITE3_OPEN_TEMP_DB = 0x00000200; 45 | export const SQLITE3_OPEN_TRANSIENT_DB = 0x00000400; 46 | export const SQLITE3_OPEN_MAIN_JOURNAL = 0x00000800; 47 | export const SQLITE3_OPEN_TEMP_JOURNAL = 0x00001000; 48 | export const SQLITE3_OPEN_SUBJOURNAL = 0x00002000; 49 | export const SQLITE3_OPEN_SUPER_JOURNAL = 0x00004000; 50 | export const SQLITE3_OPEN_NONMUTEX = 0x00008000; 51 | export const SQLITE3_OPEN_FULLMUTEX = 0x00010000; 52 | export const SQLITE3_OPEN_SHAREDCACHE = 0x00020000; 53 | export const SQLITE3_OPEN_PRIVATECACHE = 0x00040000; 54 | export const SQLITE3_OPEN_WAL = 0x00080000; 55 | export const SQLITE3_OPEN_NOFOLLOW = 0x01000000; 56 | 57 | // Prepare Flags 58 | export const SQLITE3_PREPARE_PERSISTENT = 0x00000001; 59 | export const SQLITE3_PREPARE_NORMALIZE = 0x00000002; 60 | export const SQLITE3_PREPARE_NO_VTAB = 0x00000004; 61 | 62 | // Fundamental Datatypes 63 | export const SQLITE_INTEGER = 1; 64 | export const SQLITE_FLOAT = 2; 65 | export const SQLITE_TEXT = 3; 66 | export const SQLITE_BLOB = 4; 67 | export const SQLITE_NULL = 5; 68 | -------------------------------------------------------------------------------- /src/plumbing/resolve.ts: -------------------------------------------------------------------------------- 1 
| import { Package, PackageRequirement, Installation } from "../types.ts" 2 | import useInventory from "../hooks/useInventory.ts" 3 | import { str as pkgstr } from "../utils/pkg.ts" 4 | import useCellar from "../hooks/useCellar.ts" 5 | import { PkgxError } from "../utils/error.ts" 6 | 7 | /// NOTE resolves to bottles 8 | /// NOTE contract there are no duplicate projects in input 9 | 10 | export interface Resolution { 11 | /// fully resolved list (includes both installed and pending) 12 | pkgs: Package[] 13 | 14 | /// already installed packages 15 | installed: Installation[] 16 | 17 | /// these are the pkgs that aren’t yet installed 18 | pending: Package[] 19 | } 20 | 21 | export class ResolveError extends PkgxError { 22 | pkg: Package | PackageRequirement 23 | 24 | constructor(pkg: Package | PackageRequirement) { 25 | super(`not-found: pkg: ${pkgstr(pkg)}`) 26 | this.pkg = pkg 27 | } 28 | } 29 | 30 | /// resolves a list of package specifications based on what is available in 31 | /// bottle storage if `update` is false we will return already installed pkgs 32 | /// that resolve so if we are resolving `node>=12`, node 13 is installed, but 33 | /// node 19 is the latest we return node 13. if `update` is true we return node 34 | /// 19 and *you will need to install it*. 
35 | export default async function resolve(reqs: (Package | PackageRequirement)[], {update}: {update: boolean | Set} = {update: false}): Promise { 36 | const inventory = _internals.useInventory() 37 | const cellar = _internals.useCellar() 38 | const rv: Resolution = { pkgs: [], installed: [], pending: [] } 39 | let installation: Installation | undefined 40 | 41 | const promises: Promise[] = [] 42 | 43 | for (const req of reqs) { 44 | const noup = !should_update(req.project) 45 | if (noup && (installation = await cellar.has(req))) { 46 | // if something is already installed that satisfies the constraint then use it 47 | rv.installed.push(installation) 48 | rv.pkgs.push(installation.pkg) 49 | } else { 50 | const promise = inventory.select(req).then(async version => { 51 | if (!version) { 52 | throw new ResolveError(req) 53 | } 54 | const pkg = { version, project: req.project } 55 | rv.pkgs.push(pkg) 56 | 57 | if ((installation = await cellar.has(pkg))) { 58 | // we were asked to update, but we already are at the latest version 59 | rv.installed.push(installation) 60 | } else { 61 | rv.pending.push(pkg) 62 | } 63 | }) 64 | promises.push(promise) 65 | } 66 | } 67 | 68 | await Promise.all(promises) 69 | 70 | return rv 71 | 72 | function should_update(project: string) { 73 | return update === true || (update instanceof Set && update.has(project)) 74 | } 75 | } 76 | 77 | export const _internals = { 78 | useInventory, 79 | useCellar 80 | } 81 | -------------------------------------------------------------------------------- /src/plumbing/link.ts: -------------------------------------------------------------------------------- 1 | import SemVer, * as semver from "../utils/semver.ts" 2 | import { Package, Installation } from "../types.ts" 3 | import useCellar from "../hooks/useCellar.ts" 4 | import { panic } from "../utils/error.ts" 5 | import fs from "node:fs/promises" 6 | import Path from "../utils/Path.ts" 7 | 8 | export default async function link(pkg: Package | 
Installation) { 9 | const installation = await useCellar().resolve(pkg) 10 | pkg = installation.pkg 11 | 12 | const versions = (await useCellar() 13 | .ls(installation.pkg.project)) 14 | .map(({pkg: {version}, path}) => [version, path] as [SemVer, Path]) 15 | .sort(([a],[b]) => a.compare(b)) 16 | 17 | if (versions.length <= 0) { 18 | const err = new Error('no versions') 19 | err.cause = pkg 20 | throw err 21 | } 22 | 23 | const shelf = installation.path.parent() 24 | const newest = versions.slice(-1)[0] 25 | const vMm = `${pkg.version.major}.${pkg.version.minor}` 26 | const minorRange = new semver.Range(`^${vMm}`) 27 | const mostMinor = versions.filter(v => minorRange.satisfies(v[0])).at(-1) ?? panic() 28 | 29 | if (mostMinor[0].neq(pkg.version)) return 30 | // ^^ if we’re not the most minor we definitely not the most major 31 | 32 | await makeSymlink(`v${vMm}`) 33 | 34 | const majorRange = new semver.Range(`^${pkg.version.major.toString()}`) 35 | const mostMajor = versions.filter(v => majorRange.satisfies(v[0])).at(-1) ?? 
panic() 36 | 37 | if (mostMajor[0].neq(pkg.version)) return 38 | // ^^ if we’re not the most major we definitely aren’t the newest 39 | 40 | await makeSymlink(`v${pkg.version.major}`) 41 | 42 | if (pkg.version.eq(newest[0])) { 43 | await makeSymlink('v*') 44 | } 45 | 46 | async function makeSymlink(symname: string) { 47 | try { 48 | const what_we_make = shelf.join(symname) 49 | if (what_we_make.isSymlink()) { 50 | try { 51 | // using this rather than rm due to bug in deno shims that 52 | // tries to call rmdir on the symlink because the symlink points to a dir 53 | await fs.unlink(what_we_make.string) 54 | } catch (err) { 55 | // we were deleted by another thing linking simultaneously 56 | //FIXME our flock should surround the link step too 57 | if (err instanceof Error && "code" in err && err.code != 'ENOENT') throw err 58 | } 59 | } 60 | 61 | await Deno.symlink( 62 | installation.path.basename(), // makes it relative 63 | shelf.join(symname).rm().string, 64 | {type: 'dir'}) 65 | } catch (err) { 66 | if (err instanceof Deno.errors.AlreadyExists || err instanceof Error && "code" in err && err.code === 'EEXIST') { 67 | //FIXME race condition for installing the same pkg simultaneously 68 | // real fix is to lock around the entire download/untar/link process 69 | return 70 | } else { 71 | throw err 72 | } 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/utils/pkg.test.ts: -------------------------------------------------------------------------------- 1 | import { assert, assertEquals, assertFalse, assertThrows } from "@std/assert" 2 | import SemVer, { Range } from "./semver.ts" 3 | import * as pkg from "./pkg.ts" 4 | 5 | Deno.test("pkg.str", async test => { 6 | let out: string 7 | 8 | await test.step("precise", () => { 9 | out = pkg.str({ 10 | project: "test", 11 | version: new SemVer("1.2.3") 12 | }) 13 | assertEquals(out, "test=1.2.3") 14 | }) 15 | 16 | for (const range of ["^1", "^1.2", "^1.2.3"]) { 17 
import { assert, assertEquals, assertFalse, assertThrows } from "@std/assert"
import SemVer, { Range } from "./semver.ts"
import * as pkg from "./pkg.ts"

// Round-trip tests for the pkg-spec string helpers: str(), parse(), compare().

Deno.test("pkg.str", async test => {
  let out: string

  // a precise version renders as `project=version`
  await test.step("precise", () => {
    out = pkg.str({
      project: "test",
      version: new SemVer("1.2.3")
    })
    assertEquals(out, "test=1.2.3")
  })

  // caret ranges render unchanged
  for (const range of ["^1", "^1.2", "^1.2.3"]) {
    await test.step(range, () => {
      out = pkg.str({
        project: "test",
        constraint: new Range(range)
      })
      assertEquals(out, `test${range}`)
    })
  }

  // equivalent `>=a <b` ranges are normalized to caret form
  for (const [range, expected] of [[">=1 <2", "^1"], [">=1.2 <2", "^1.2"], [">=1.2.3 <2", "^1.2.3"]]) {
    await test.step(`${range} == ${expected}`, () => {
      out = pkg.str({
        project: "test",
        constraint: new Range(range)
      })
      assertEquals(out, `test${expected}`)
    })
  }

  // a single-version range renders with `=` like a precise version
  await test.step("range of one version", () => {
    const constraint = new Range("=1.2.3")

    out = pkg.str({
      project: "test",
      constraint
    })
    assert(constraint.single())
    assertEquals(out, `test=1.2.3`)
  })
})

Deno.test("pkg.parse", async test => {
  // `@N` means caret-at-that-precision: @5 → ^5
  await test.step("@5", () => {
    const { constraint } = pkg.parse("test@5")
    assert(constraint.satisfies(new SemVer([5,0,0])))
    assert(constraint.satisfies(new SemVer([5,1,0])))
    assertFalse(constraint.satisfies(new SemVer([6,0,0])))
  })

  // @5.0 → ^5.0 (patch may float, minor may not)
  await test.step("@5.0", () => {
    const { constraint } = pkg.parse("test@5.0")
    assert(constraint.satisfies(new SemVer([5,0,0])))
    assert(constraint.satisfies(new SemVer([5,0,1])))
    assertFalse(constraint.satisfies(new SemVer([5,1,0])))
  })

  // @5.0.0 → ^5.0.0 (only components beyond patch may float)
  await test.step("@5.0.0", () => {
    const { constraint } = pkg.parse("test@5.0.0")
    assert(constraint.satisfies(new SemVer([5,0,0])))
    assert(constraint.satisfies(new SemVer([5,0,0,1])))
    assertFalse(constraint.satisfies(new SemVer([5,0,1])))
  })

  await test.step("bad input", () => {
    assertThrows(() => pkg.parse("asdf^@~"), "invalid pkgspec: asdf^@~")
  })

  // surrounding whitespace is stripped before parsing
  await test.step("leading & trailing space", () => {
    const { constraint } = pkg.parse(" test@5\t")
    assert(constraint.satisfies(new SemVer([5,0,0])))
  })
})

Deno.test("pkg.compare", async test => {
  // same project: ordering falls through to version comparison
  await test.step("compare versions", () => {
    const a = { project: "test", version: new SemVer("1.2.3") }
    const b = { project: "test", version: new SemVer("2.1.3") }
    assert(pkg.compare(a, b) < 0)
  })

  // different projects: ordering is by project name first
  await test.step("compare pkg names", () => {
    const a = { project: "a", version: new SemVer("1.2.3") }
    const b = { project: "b", version: new SemVer("1.2.3") }
    assert(pkg.compare(a, b) < 0)
  })
})
"C:\\foo;D:\\bar" : "/foo:/bar" 16 | 17 | config = ConfigDefault({ PKGX_PANTRY_PATH, CI: "true" }) 18 | if (Deno.build.os == 'windows') { 19 | assertEquals(config.pantries.map(x => x.string), ["C:\\foo", "D:\\bar"]) 20 | } else { 21 | assertEquals(config.pantries.map(x => x.string), ["/foo", "/bar"]) 22 | } 23 | assertEquals(config.options.compression, "gz") 24 | 25 | assertFalse(_internals.boolize("false")) 26 | assertFalse(_internals.boolize("0")) 27 | assertFalse(_internals.boolize("no")) 28 | assert(_internals.boolize("1")) 29 | assert(_internals.boolize("yes")) 30 | 31 | assert(_internals.initialized()) 32 | }) 33 | 34 | Deno.test("useConfig empty PKGX_DIR is ignored", () => { 35 | assertEquals(ConfigDefault({ PKGX_DIR: "" }).prefix, Path.home().join(".pkgx")) 36 | assertEquals(ConfigDefault({ PKGX_DIR: " " }).prefix, Path.home().join(".pkgx")) 37 | assertEquals(ConfigDefault({ PKGX_DIR: " / " }).prefix, Path.root) 38 | assertThrows(() => ConfigDefault({ PKGX_DIR: " foo " })) 39 | assertThrows(() => ConfigDefault({ PKGX_DIR: "foo" })) 40 | }) 41 | 42 | Deno.test("useConfig empty PKGX_PANTRY_PATH is ignored", () => { 43 | const SEP = Deno.build.os == 'windows' ? 
';' : ':' 44 | assertEquals(ConfigDefault({ PKGX_PANTRY_PATH: "" }).pantries, []) 45 | assertEquals(ConfigDefault({ PKGX_PANTRY_PATH: ` ${SEP} ${SEP}` }).pantries, []) 46 | }) 47 | 48 | Deno.test("pkgx^2 rules", () => { 49 | switch (Deno.build.os) { 50 | case 'windows': 51 | assertEquals(ConfigDefault({ XDG_DATA_HOME: "C:\\foo" }).data, Path.home().join("AppData/Local")); 52 | assertEquals(ConfigDefault().data, Path.home().join("AppData/Local")); 53 | 54 | assertEquals(ConfigDefault({ XDG_CACHE_HOME: "C:\\foo" }).cache, Path.home().join("AppData/Local")); 55 | assertEquals(ConfigDefault().cache, Path.home().join("AppData/Local")); 56 | break; 57 | case 'darwin': 58 | assertEquals(ConfigDefault({ XDG_DATA_HOME: "/foo" }).data, Path.home().join("Library/Application Support/pkgx")); 59 | assertEquals(ConfigDefault().data, Path.home().join("Library/Application Support/pkgx")); 60 | 61 | assertEquals(ConfigDefault({ XDG_CACHE_HOME: "/foo" }).cache, Path.home().join("Library/Caches/pkgx")); 62 | assertEquals(ConfigDefault().cache, Path.home().join("Library/Caches/pkgx")); 63 | break; 64 | case 'linux': 65 | assertEquals(ConfigDefault({ XDG_DATA_HOME: "/foo" }).data, new Path("/foo/pkgx")); 66 | assertEquals(ConfigDefault().data, Path.home().join(".local/share/pkgx")); 67 | break; 68 | } 69 | }) -------------------------------------------------------------------------------- /src/utils/misc.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals, assertRejects, assertThrows } from "@std/assert" 2 | import { flatmap, validate } from "./misc.ts" 3 | import { isNumber } from "is-what" 4 | 5 | Deno.test("validate string", () => { 6 | assertEquals(validate.str(true), "true") 7 | assertEquals(validate.str(false), "false") 8 | assertEquals(validate.str(1), "1") 9 | 10 | assertThrows(() => validate.str({}), "not-string: {}") 11 | }) 12 | 13 | Deno.test("validate array", () => { 14 | assertEquals(validate.arr(["1", "2"]), 
import { assertEquals, assertRejects, assertThrows } from "@std/assert"
import { flatmap, validate } from "./misc.ts"
import { isNumber } from "is-what"

// Tests for the misc.ts helpers and the global prototype extensions
// (chuzzle, Set.insert, Array.compact) they install.

Deno.test("validate string", () => {
  // booleans and numbers are coerced to their string forms
  assertEquals(validate.str(true), "true")
  assertEquals(validate.str(false), "false")
  assertEquals(validate.str(1), "1")

  assertThrows(() => validate.str({}), "not-string: {}")
})

Deno.test("validate array", () => {
  assertEquals(validate.arr(["1", "2"]), ["1", "2"])
  // NOTE(review): the string 2nd argument of assertThrows is the assertion
  // *label*, not a checked error message — these strings are documentation only
  assertThrows(() => validate.arr("jkl"), "not-array: jkl")
})

Deno.test("validate obj", () => {
  assertEquals(validate.obj({a: 1}), {a: 1})
  assertThrows(() => validate.obj("jkl"), "not-array: jkl")
})

Deno.test("flatmap", () => {
  // maps truthy input, passes through undefined on falsy input or result
  assertEquals(flatmap(1, (n) => n + 1), 2)
  assertEquals(flatmap(undefined, (n: number) => n + 1), undefined)
  assertEquals(flatmap(1, (_: number) => undefined), undefined)

  const throws = (_n: number) => {
    throw Error("test error")
  }

  // rescue swallows exceptions; without it they propagate
  assertEquals(flatmap(1, throws, {rescue: true}), undefined)
  assertThrows(() => flatmap(1, throws), "test error")
})

Deno.test("async flatmap", async () => {
  const add = (n: number) => Promise.resolve(n + 1)

  assertEquals(await flatmap(Promise.resolve(1), add), 2)
  assertEquals(await flatmap(Promise.resolve(undefined), add), undefined)
  assertEquals(await flatmap(Promise.resolve(1), (_n) => undefined), undefined)

  // rescue converts a rejected body into a resolved undefined
  assertEquals(await flatmap(Promise.resolve(1), () => Promise.reject(new Error()), {rescue: true}), undefined)
  await assertRejects(() => flatmap(Promise.resolve(1), () => Promise.reject("new Error()")) ?? Promise.resolve())
})

Deno.test("chuzzle", () => {
  // chuzzle: empty string / NaN collapse to undefined
  assertEquals("".chuzzle(), undefined)
  assertEquals("test".chuzzle(), "test")
  assertEquals((1).chuzzle(), 1)
  assertEquals(NaN.chuzzle(), undefined)
})

Deno.test("set insert", () => {
  const s = new Set([1, 2, 3])

  // reports whether the element was actually added
  assertEquals(s.insert(1), {inserted: false})
  assertEquals(s.insert(4), {inserted: true})
  assertEquals(s.size, 4)

  assertEquals(s.has(1), true)
  assertEquals(s.has(4), true)
})

Deno.test("array compact", () => {
  // compact strips falsy entries; with a body, maps then strips falsy results
  assertEquals([1, 2, undefined, null, false, 3].compact(), [1, 2, 3])
  assertEquals([1, 2, undefined, null, false, 3].compact((n) => isNumber(n) && n * 2), [2, 4, 6])

  // will fail to compile if the compiler cannot infer the type of the compact() return
  assertEquals([1, 2, undefined, null, false as false | number, 3].compact()[0] + 1, 2)

  // verifies transforming the type gives singular type return
  const foo = [1, 2, undefined, null, false, 3].compact((n) => isNumber(n) && `${n * 2}`)
  assertEquals(foo, ["2", "4", "6"])

  const throws = () => {
    throw Error("test error")
  }
  // rescue drops throwing elements rather than aborting
  assertEquals([()=>1, ()=>2, throws, ()=>3].compact((n) => n() * 2, { rescue: true }), [2, 4, 6])
  assertThrows(() => [()=>1, ()=>2, throws, ()=>3].compact((n) => n() * 2))
})
useTestConfig() 10 | const exenames = await usePantry().project("python.org").provides() 11 | assert(exenames.includes("python")) 12 | }) 13 | 14 | Deno.test("which()", async () => { 15 | useTestConfig() 16 | const pkg = await usePantry().which({ interprets: ".py" }) 17 | assertEquals(pkg?.project, "python.org") 18 | }) 19 | 20 | Deno.test("provider()", async () => { 21 | useTestConfig() 22 | const provides = await usePantry().project("npmjs.com").provider() 23 | const foo = provides!('truffle') 24 | assertEquals(foo![0], 'npx') 25 | }) 26 | 27 | Deno.test("available()", async () => { 28 | useTestConfig() 29 | const stubber = stub(_internals, 'platform', () => "darwin" as "darwin" | "linux") 30 | assert(await usePantry().project("python.org").available()) 31 | stubber.restore() 32 | }) 33 | 34 | Deno.test("runtime.env", async () => { 35 | const PKGX_PANTRY_PATH = srcroot.join("fixtures").string 36 | const { prefix } = useTestConfig({ PKGX_PANTRY_PATH }) 37 | 38 | const deps = [{ 39 | pkg: { 40 | project: "bar.com", 41 | version: new SemVer("1.2.3") 42 | }, 43 | path: prefix.join("bar.com/v1.2.3") 44 | }] 45 | 46 | const env = await usePantry().project("foo.com").runtime.env(new SemVer("2.3.4"), deps) 47 | 48 | assertEquals(env.BAZ, prefix.join("bar.com/v1.2.3/baz").string) 49 | }) 50 | 51 | Deno.test("missing - without cache", () => { 52 | useTestConfig() 53 | usePantry().prefix.rm({ recursive: true }) 54 | assert(usePantry().missing()) 55 | }) 56 | 57 | Deno.test("missing - with cache", () => { 58 | useTestConfig().cache.mkdir("p").join('pantry.db').touch() 59 | usePantry().prefix.rm({ recursive: true }) 60 | assert(usePantry().missing()) 61 | }) 62 | 63 | Deno.test("validatePackageRequirement - valid input", () => { 64 | const result = validatePackageRequirement("pkgx.sh/test", "^1.0.0") 65 | assertEquals(result?.project, "pkgx.sh/test") 66 | assertEquals(result?.constraint.toString(), "^1") 67 | }) 68 | 69 | Deno.test("validatePackageRequirement - invalid 
constraint", () => { 70 | assertThrows(() => validatePackageRequirement("pkgx.sh/test", "nonsense")) 71 | }) 72 | 73 | Deno.test("validatePackageRequirement - number constraint", () => { 74 | const result = validatePackageRequirement("pkgx.sh/test", 1) 75 | assertEquals(result?.constraint.toString(), "^1") 76 | }) 77 | 78 | 79 | Deno.test("validatePackageRequirement - valid input", () => { 80 | const result = validatePackageRequirement("pkgx.sh/test", "^1.0.0") 81 | assertEquals(result?.project, "pkgx.sh/test") 82 | assertEquals(result?.constraint.toString(), "^1") 83 | }) 84 | 85 | Deno.test("validatePackageRequirement - invalid constraint", () => { 86 | assertThrows(() => validatePackageRequirement("pkgx.sh/test", "nonsense")) 87 | }) 88 | 89 | Deno.test("validatePackageRequirement - number constraint", () => { 90 | const result = validatePackageRequirement("pkgx.sh/test", 1) 91 | assertEquals(result?.constraint.toString(), "^1") 92 | }) 93 | 94 | Deno.test("find", async () => { 95 | useTestConfig() 96 | const foo = await usePantry().find("python@3.11") 97 | assertEquals(foo.length, 1) 98 | assertEquals(foo[0].project, "python.org") 99 | }) -------------------------------------------------------------------------------- /src/hooks/useCellar.ts: -------------------------------------------------------------------------------- 1 | import { Package, PackageRequirement, Installation } from "../types.ts" 2 | import { PkgxError } from "../utils/error.ts" 3 | import * as pkgutils from "../utils/pkg.ts" 4 | import SemVer from "../utils/semver.ts" 5 | import useConfig from "./useConfig.ts" 6 | import Path from "../utils/Path.ts" 7 | 8 | export class InstallationNotFoundError extends PkgxError { 9 | pkg: Package | PackageRequirement 10 | 11 | constructor(pkg: Package | PackageRequirement) { 12 | super(`not found: ${pkgutils.str(pkg)}`) 13 | this.pkg = pkg 14 | } 15 | } 16 | 17 | export default function useCellar() { 18 | const config = useConfig() 19 | 20 | /// eg. 
import { Package, PackageRequirement, Installation } from "../types.ts"
import { PkgxError } from "../utils/error.ts"
import * as pkgutils from "../utils/pkg.ts"
import SemVer from "../utils/semver.ts"
import useConfig from "./useConfig.ts"
import Path from "../utils/Path.ts"

/// thrown by `resolve()` when no non-vacant installation satisfies the input
export class InstallationNotFoundError extends PkgxError {
  pkg: Package | PackageRequirement

  constructor(pkg: Package | PackageRequirement) {
    super(`not found: ${pkgutils.str(pkg)}`)
    this.pkg = pkg
  }
}

/// accessor for the on-disk “cellar”: the tree of installed kegs under
/// `config.prefix/<project>/v<version>`
export default function useCellar() {
  const config = useConfig()

  /// eg. ~/.pkgx/deno.land
  const shelf = (project: string) => config.prefix.join(project)

  /// eg. ~/.pkgx/deno.land/v1.2.3
  const keg = (pkg: Package) => shelf(pkg.project).join(`v${pkg.version}`)

  /// returns the `Installation` if the pkg is installed
  /// (swallows the not-found error that `resolve()` throws)
  const has = (pkg: Package | PackageRequirement | Path) => resolve(pkg).swallow(InstallationNotFoundError)

  return {
    has,
    ls,
    keg,
    resolve,
    shelf,
  }

  /// returns a project’s installations (sorted by version)
  async function ls(project: string) {
    const d = shelf(project)

    if (!d.isDirectory()) return []

    const rv: Installation[] = []
    for await (const [path, {name, isDirectory}] of d.ls()) {
      try {
        if (!isDirectory) continue
        // only `v<semver>` directories count; `var` is excluded explicitly
        // since it starts with `v` but holds mutable state, not a keg
        if (!name.startsWith("v") || name == 'var') continue
        const version = new SemVer(name)
        if (await vacant(path)) continue // failed build probs
        rv.push({path, pkg: {project, version}})
      } catch {
        //noop: other directories can exist
      }
    }

    return rv.sort((a, b) => pkgutils.compare(a.pkg, b.pkg))
  }

  /// if package is installed, returns its installation
  /// accepts an `Installation` (returned as-is), a keg `Path`, a precise
  /// `Package`, or a `PackageRequirement` (resolved to the greatest installed
  /// version satisfying the constraint); throws `InstallationNotFoundError`
  /// when nothing non-vacant matches
  async function resolve(pkg: Package | PackageRequirement | Path | Installation) {
    const installation = await (async () => {
      if ("pkg" in pkg) { return pkg }
      // ^^ is `Installation`

      const { prefix } = config
      if (pkg instanceof Path) {
        // reconstruct the pkg from the keg path, eg. <prefix>/deno.land/v1.2.3
        const path = pkg
        const version = new SemVer(path.basename())
        const project = path.parent().relative({ to: prefix })
        return {
          path, pkg: { project, version }
        }
      } else if ("version" in pkg) {
        // precise Package: the keg path is fully determined
        const path = keg(pkg)
        return { path, pkg }
      } else {
        // PackageRequirement: pick the max installed version in range
        const installations = await ls(pkg.project)
        const versions = installations.map(({ pkg: {version}}) => version)
        const version = pkg.constraint.max(versions)
        if (version) {
          const path = installations.find(({pkg: {version: v}}) => v.eq(version))!.path
          return { path, pkg: { project: pkg.project, version } }
        } else {
          throw new InstallationNotFoundError(pkg)
        }
      }
    })()
    // an empty keg directory (eg. from a failed build) is not an installation
    if (await vacant(installation.path)) {
      throw new InstallationNotFoundError(installation.pkg)
    }
    return installation
  }
}

/// if we ignore transient files, is there a package here?
async function vacant(path: Path): Promise<boolean> {
  if (!path.isDirectory()) {
    return true
  } else for await (const _ of path.ls()) {
    // any single entry means the keg is occupied
    return false
  }
  return true
}
import { flock } from "../utils/flock.ts"
import useDownload from "./useDownload.ts"
import usePantry from "./usePantry.ts"
import useConfig from "./useConfig.ts"
import Path from "../utils/Path.ts"
import useSyncCache from "./useSyncCache.ts";

//FIXME tar is fetched from PATH :/ we want control
//FIXME run in general is not controllable since it delegates to the shell

/// progress callbacks for the sync operation
interface Logger {
  syncing(path: Path): void
  caching(path: Path): void
  syncd(path: Path): void
}

/// syncs the pantry (git when available, tarball download otherwise) while
/// holding a file lock, then refreshes the cache on a best-effort basis
export default async function(logger?: Logger) {
  const pantry_dir = usePantry().prefix.parent()

  logger?.syncing(pantry_dir)

  // lock so concurrent pkgx processes don’t clobber each other’s checkout
  const unflock = await flock(pantry_dir.mkdir('p'))

  try {
    await _internals.sync(pantry_dir)
    try {
      logger?.caching(pantry_dir)
      await _internals.cache()
    } catch (err) {
      // the cache is an optimization: a caching failure must not fail the sync
      console.warn("failed to cache pantry")
      console.error(err)
    }
  } finally {
    await unflock()
  }

  logger?.syncd(pantry_dir)
}

// stubbed in tests
export const _internals = {
  sync, cache: useSyncCache
}

/// fetches the pantry into `pantry_dir`; prefers a shallow bare git clone
/// (fast incremental updates), falls back to untarring the GitHub archive
async function sync(pantry_dir: Path) {
  try {
    //TODO if there was already a lock, just wait on it, don’t do the following stuff

    const git_dir = pantry_dir.parent().join("pantries/pkgxdev/pantry")

    if (git_dir.join("HEAD").isFile()) {
      // existing bare clone: fast-forward main
      await git("-C", git_dir, "fetch", "--quiet", "origin", "--force", "main:main")
    } else {
      await git("clone", "--quiet", "--bare", "--depth=1", "https://github.com/pkgxdev/pantry", git_dir)
    }

    // materialize the worktree into the pantry directory
    await git("--git-dir", git_dir, "--work-tree", pantry_dir, "checkout", "--quiet", "--force")

  } catch {
    // git failure or no git installed
    // ∴ download the latest tarball and uncompress over the top
    //FIXME deleted packages will not be removed with this method
    const src = new URL(`https://github.com/pkgxdev/pantry/archive/refs/heads/main.tar.gz`)
    const proc = new Deno.Command("tar", {
      args: ["xzf", "-", "--strip-components=1"],
      cwd: pantry_dir.string,
      stdin: "piped",
    }).spawn()
    // stream the download straight into tar’s stdin
    const writer = proc.stdin.getWriter()
    await useDownload().download({ src }, blob => writer.write(blob))
    writer.close()

    if (!(await proc.status).success) {
      throw new Error("untar failed")
    }
  }
}

//////////////////////// utils

/// runs the configured git binary; throws "no-git" when none is configured,
/// which the caller above catches to trigger the http fallback
async function git(...args: (string | Path)[]) {
  const { git } = useConfig()
  if (!git) throw new Error("no-git") // caught above to trigger http download instead
  await run({cmd: [git, ...args]})
}

export interface RunOptions {
  cmd: (string | Path)[]
}

/// spawns `cmd` with a minimal environment (proxy vars only — clearEnv drops
/// everything else); throws on non-zero exit, attaching the child as `cause`
async function run(opts: RunOptions) {
  const cmd = opts.cmd.map(x => `${x}`);
  const env = (({ HTTP_PROXY, HTTPS_PROXY }) => ({ HTTP_PROXY, HTTPS_PROXY }))(Deno.env.toObject());

  const proc = new Deno.Command(cmd[0], {
    args: cmd.slice(1),
    stdout: 'null',
    clearEnv: true,
    env,
    ...opts,
  });

  const child = proc.spawn();

  try {
    const status = await child.status;
    if (!status.success) {
      throw new Error(`run.exit(${status.code})`);
    }
  } catch (err) {
    // expose the child process to callers that want to inspect it
    if (err instanceof Error) {
      err.cause = child;
    }
    throw err;
  }
}
(!status.success) { 107 | throw new Error(`run.exit(${status.code})`); 108 | } 109 | } catch (err) { 110 | if (err instanceof Error) { 111 | err.cause = child; 112 | } 113 | throw err; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /src/porcelain/install.ts: -------------------------------------------------------------------------------- 1 | import install, { Logger as BaseLogger, ConsoleLogger as BaseConsoleLogger } from "../plumbing/install.ts" 2 | import { Installation, PackageSpecification } from "../types.ts" 3 | import resolve, { Resolution } from "../plumbing/resolve.ts" 4 | import usePantry from "../hooks/usePantry.ts" 5 | import hydrate from "../plumbing/hydrate.ts" 6 | import useSync from "../hooks/useSync.ts" 7 | import { parse } from "../utils/pkg.ts" 8 | import link from "../plumbing/link.ts" 9 | import { is_what } from "../deps.ts" 10 | const { isString } = is_what 11 | 12 | export interface Logger extends BaseLogger { 13 | resolved?(resolution: Resolution): void 14 | /// from 0.0–1.0 15 | /// currently you won’t get this immediately since we are waiting for all our 16 | /// network requests to return before we know the final download size 17 | progress?(completion: number): void 18 | } 19 | 20 | // deno-lint-ignore no-explicit-any 21 | export function ConsoleLogger(prefix?: any): Logger { 22 | prefix = prefix ? `${prefix}: ` : "" 23 | return { 24 | ...BaseConsoleLogger(prefix), 25 | progress: function() { console.error(`${prefix}progress`, ...arguments) }, 26 | } 27 | } 28 | 29 | /// eg. install("python.org~3.10") 30 | export default async function(pkgs: PackageSpecification[] | string[] | string, logger?: Logger): Promise { 31 | 32 | const { hydrate, resolve, install, link, useSync } = _internals 33 | 34 | if (isString(pkgs)) pkgs = pkgs.split(/\s+/) 35 | pkgs = pkgs.map(pkg => isString(pkg) ? 
parse(pkg) : pkg) 36 | 37 | const pantry = usePantry() 38 | 39 | if (pantry.missing()) { 40 | await useSync() 41 | } 42 | 43 | //TODO parallelize! 44 | 45 | pkgs = (await hydrate(pkgs)).pkgs 46 | const resolution = await resolve(pkgs) 47 | logger?.resolved?.(resolution) 48 | 49 | const { pending, installed } = resolution 50 | logger = WrapperLogger(pending, logger) 51 | const installers = pending 52 | .map(pkg => install(pkg, logger) 53 | .then(i => Deno.build.os != 'windows' 54 | ? link(i).then(() => i) 55 | : i)) 56 | 57 | installed.push(...await Promise.all(installers)) 58 | 59 | return installed 60 | } 61 | 62 | function WrapperLogger(pending: PackageSpecification[], logger?: Logger): Logger | undefined { 63 | if (!logger?.progress) return logger 64 | 65 | const projects = pending.map(pkg => pkg.project) 66 | const totals: Record = {} 67 | const progresses: Record = {} 68 | return { 69 | ...logger, 70 | downloading: args => { 71 | const { pkg: {project}, total } = args 72 | if (total) { 73 | totals[project] = total 74 | updateProgress() 75 | } 76 | if (logger?.downloading) { 77 | logger.downloading(args) 78 | } 79 | }, 80 | installing: args => { 81 | const { pkg: {project}, progress } = args 82 | if (progress) { 83 | progresses[project] = progress 84 | updateProgress() 85 | } 86 | if (logger?.installing) { 87 | logger.installing(args) 88 | } 89 | } 90 | } 91 | 92 | function updateProgress() { 93 | let total_untard_bytes = 0 94 | let grand_total = 0 95 | for (const project of projects) { 96 | const total = totals[project] 97 | const bytes = progresses[project] * total 98 | total_untard_bytes += bytes 99 | grand_total += total 100 | } 101 | const rv = total_untard_bytes / grand_total 102 | if (!isNaN(rv)) { 103 | logger!.progress!(total_untard_bytes / grand_total) 104 | } 105 | } 106 | } 107 | 108 | export const _internals = { 109 | hydrate, 110 | resolve, 111 | install, 112 | link, 113 | useSync 114 | } 
-------------------------------------------------------------------------------- /src/utils/misc.ts: -------------------------------------------------------------------------------- 1 | //CONTRACT you can’t use anything from hooks 2 | 3 | import { is_what, PlainObject } from "../deps.ts" 4 | const { isPlainObject, isArray } = is_what 5 | 6 | function validate_str(input: unknown): string { 7 | if (typeof input == 'boolean') return input ? 'true' : 'false' 8 | if (typeof input == 'number') return input.toString() 9 | if (typeof input != 'string') throw new Error(`not-string: ${input}`) 10 | return input 11 | } 12 | 13 | function validate_plain_obj(input: unknown): PlainObject { 14 | if (!isPlainObject(input)) throw new Error(`not-plain-obj: ${JSON.stringify(input)}`) 15 | return input 16 | } 17 | 18 | function validate_arr(input: unknown): Array { 19 | if (!isArray(input)) throw new Error(`not-array: ${JSON.stringify(input)}`) 20 | return input 21 | } 22 | 23 | const validate = { 24 | str: validate_str, 25 | obj: validate_plain_obj, 26 | arr: validate_arr 27 | } 28 | 29 | export { validate } 30 | 31 | ////////////////////////////////////////////////////////////// base extensions 32 | type Falsy = false | 0 | '' | null | undefined; 33 | 34 | declare global { 35 | interface Array { 36 | compact(): Array>; 37 | compact(body: (t: T) => S | Falsy): Array 38 | compact(body?: (t: T) => S | T | Falsy, opts?: { rescue: boolean }): Array 39 | } 40 | 41 | interface Set { 42 | insert(t: T): { inserted: boolean } 43 | } 44 | } 45 | 46 | Set.prototype.insert = function(t: T) { 47 | if (this.has(t)) { 48 | return {inserted: false} 49 | } else { 50 | this.add(t) 51 | return {inserted: true} 52 | } 53 | } 54 | 55 | Array.prototype.compact = function(body?: (t: T) => S | Falsy, opts?: { rescue: boolean }): S[] { 56 | const rv: S[] = [] 57 | for (const e of this) { 58 | try { 59 | const f = body ? 
body(e) : e 60 | if (f) rv.push(f) 61 | } catch (err) { 62 | if (opts === undefined || opts.rescue === false) throw err 63 | } 64 | } 65 | return rv 66 | } 67 | 68 | export function flatmap(t: T | Falsy, body: (t: T) => S | Falsy, opts?: {rescue: boolean}): S | undefined; 69 | export function flatmap(t: Promise, body: (t: T) => Promise, opts?: {rescue: boolean}): Promise; 70 | export function flatmap(t: Promise | (T | Falsy), body: (t: T) => (S | Falsy) | Promise, opts?: {rescue: boolean}): Promise | (S | undefined) { 71 | try { 72 | if (t instanceof Promise) { 73 | const foo = t.then(t => { 74 | if (!t) return 75 | const s = body(t) as Promise 76 | if (!s) return 77 | const bar = s.then(body => body || undefined) 78 | if (opts?.rescue) { 79 | return bar.catch(() => { return undefined }) 80 | } else { 81 | return bar 82 | } 83 | }) 84 | return foo 85 | } else { 86 | if (t) return body(t) as (S | Falsy) || undefined 87 | } 88 | } catch (err) { 89 | if (!opts?.rescue) throw err 90 | } 91 | } 92 | 93 | // export async function async_flatmap(t: Promise, body: (t: T) => Promise, opts?: {rescue?: boolean}): Promise { 94 | // try { 95 | // const tt = await t 96 | // if (tt) return await body(tt) || undefined 97 | // } catch (err) { 98 | // if (!opts?.rescue) throw err 99 | // } 100 | // } 101 | 102 | //////////////////////////////////////////////////////// chuzzle 103 | declare global { 104 | interface String { 105 | chuzzle(): string | undefined 106 | } 107 | 108 | interface Number { 109 | chuzzle(): number | undefined 110 | } 111 | } 112 | 113 | String.prototype.chuzzle = function() { 114 | return this.trim() || undefined 115 | } 116 | 117 | Number.prototype.chuzzle = function() { 118 | return Number.isNaN(this) ? 
// deno-lint-ignore-file no-explicit-any
import { useTestConfig, srcroot } from "../hooks/useTestConfig.ts"
import { assert, assertEquals, assertFalse } from "@std/assert"
import install, { ConsoleLogger, Logger } from "./install.ts"
import { stub } from "@std/testing/mock"
import SemVer from "../utils/semver.ts"
import { Package } from "../types.ts"
import host from "../utils/host.ts";
import { _internals } from "../hooks/useFetch.ts"

// Exercises plumbing/install against the fixture bottle foo.com-5.43.0.tgz,
// with fetch stubbed out so no network is touched.

Deno.test("install", async runner => {

  const pkg: Package = {
    project: "foo.com",
    version: new SemVer("5.43.0")
  }
  const { arch, platform } = host()

  // stub fetch: serve the fixture’s checksum, and 304 for the bottle itself
  // (304 makes install fall back to the locally cached tarball)
  // deno-lint-ignore require-await
  const fetch_stub = stub(_internals, "fetch", async opts => {
    if ((opts as URL).pathname.endsWith("sha256sum")) {
      return {
        ok: true,
        status: 200,
        text() {
          return Promise.resolve("03301cc30a9ca1fdc90dc130da2b3672932f331beae78e65e8bb4e0c6c99840b")
        }
      }
    } else {
      return {status: 304, ok: true} as any
    }
  })

  try {
    await runner.step("install()", async runner => {
      const conf = useTestConfig({ CI: "1" }) // CI to force .gz compression

      /// download() will use the cached version and not do http
      srcroot.join("fixtures/foo.com-5.43.0.tgz").cp({ to:
        conf.cache.mkdir('p').join(`foo.com-5.43.0+${platform}+${arch}.tar.gz`)
      })

      await runner.step("download & install", async () => {
        // for coverage
        const logger = ConsoleLogger()
        // const stubber = stub(console, "error", x => assert(x))

        const installation = await install(pkg, logger)

        assertEquals(installation.pkg.project, pkg.project)
        assertEquals(installation.pkg.version, pkg.version)
        assertEquals(installation.path, conf.prefix.join(pkg.project, `v${pkg.version}`))

        /// so next test works
        installation.path.rm({ recursive: true })

        // stubber.restore()
      })

      await runner.step("untar & install", async () => {
        // since we're already downloaded this tests the untar directly code-path
        // also tests we can overwrite stuff since that seems to be a thing we expect

        const installation = await install(pkg)

        assertEquals(installation.pkg.project, pkg.project)
        assertEquals(installation.pkg.version, pkg.version)
        assertEquals(installation.path, conf.prefix.join(pkg.project, `v${pkg.version}`))
      })
    })

    await runner.step("install locks", async () => {

      const conf = useTestConfig({ CI: "1" })

      /// download() will use the cached version and not do http
      srcroot.join("fixtures/foo.com-5.43.0.tgz").cp({ to:
        conf.cache.mkdir('p').join(`foo.com-5.43.0+${platform}+${arch}.tar.gz`)
      })

      // the flock means neither install should download or untar after the
      // other has released its lock — assert via logger callback ordering
      let unlocked_once = false
      const logger: Logger = {
        downloading: () => assertFalse(unlocked_once),
        locking: () => {},
        installed: () => {},
        installing: () => assertFalse(unlocked_once),
        unlocking: () => unlocked_once = true
      }

      const installer1 = install(pkg, logger)
      const installer2 = install(pkg, logger)

      const [install1, install2] = await Promise.all([installer1, installer2])

      // both racers must agree on the resulting installation
      for (const installation of [install1, install2]) {
        assertEquals(installation.pkg.project, pkg.project)
        assertEquals(installation.pkg.version, pkg.version)
        assertEquals(installation.path, conf.prefix.join(pkg.project, `v${pkg.version}`))
      }
    })

  } finally {
    fetch_stub.restore()
  }
})
-------------------------------------------------------------------------------- 1 | import { assert, assertEquals, assertRejects } from "@std/assert" 2 | import { describe, it } from "@std/testing/bdd" 3 | import { PackageRequirement } from "../types.ts" 4 | import * as semver from "../utils/semver.ts" 5 | import hydrate from "./hydrate.ts" 6 | 7 | describe("hydrate()", () => { 8 | it("hydrates.1", async function() { 9 | const pkgs = [ 10 | { project: 'nodejs.org', constraint: new semver.Range('*') }, 11 | { project: 'nodejs.org', constraint: new semver.Range('>=18.14') } 12 | ] 13 | 14 | const rv1 = semver.intersect(pkgs[0].constraint, pkgs[1].constraint) 15 | assertEquals(rv1.toString(), '>=18.14') 16 | 17 | const rv = await hydrate(pkgs, (_a: PackageRequirement, _b: boolean) => Promise.resolve([])) 18 | 19 | let nodes = 0 20 | for (const pkg of rv.pkgs) { 21 | if (pkg.project === 'nodejs.org') { 22 | nodes++ 23 | assertEquals(pkg.constraint.toString(), '>=18.14') 24 | } 25 | } 26 | 27 | assertEquals(nodes, 1) 28 | }) 29 | 30 | it("hydrates.2", async function() { 31 | const pkgs = [ 32 | { project: 'pipenv.pypa.io', constraint: new semver.Range('*') }, 33 | { project: 'python.org', constraint: new semver.Range('~3.9') } 34 | ] 35 | 36 | const rv = await hydrate(pkgs, (pkg: PackageRequirement, _dry: boolean) => { 37 | if (pkg.project === 'pipenv.pypa.io') { 38 | return Promise.resolve([ 39 | { project: 'python.org', constraint: new semver.Range('>=3.7') } 40 | ]) 41 | } else { 42 | return Promise.resolve([]) 43 | } 44 | }) 45 | 46 | let nodes = 0 47 | for (const pkg of rv.pkgs) { 48 | if (pkg.project === 'python.org') { 49 | assertEquals(pkg.constraint.toString(), '~3.9') 50 | nodes++ 51 | } 52 | } 53 | 54 | assertEquals(nodes, 1) 55 | }) 56 | 57 | it("hydrates.3", async function() { 58 | const pkgs = [ 59 | { project: 'pipenv.pypa.io', constraint: new semver.Range('*') }, 60 | { project: 'python.org', constraint: new semver.Range('~3.9') } 61 | ] 62 | 63 | const 
rv = await hydrate(pkgs, (pkg: PackageRequirement, _dry: boolean) => { 64 | if (pkg.project === 'pipenv.pypa.io') { 65 | return Promise.resolve([ 66 | { project: 'python.org', constraint: new semver.Range('~3.9.1') } 67 | ]) 68 | } else { 69 | return Promise.resolve([]) 70 | } 71 | }) 72 | 73 | let nodes = 0 74 | for (const pkg of rv.pkgs) { 75 | if (pkg.project === 'python.org') { 76 | assertEquals(pkg.constraint.toString(), '~3.9.1') 77 | nodes++ 78 | } 79 | } 80 | 81 | assertEquals(nodes, 1) 82 | }) 83 | 84 | it("hydrates.unicode.org", async function() { 85 | const pkgs = [ 86 | { project: 'npmjs.com', constraint: new semver.Range('*') }, 87 | { project: 'python.org', constraint: new semver.Range('~3.9') } 88 | ] 89 | 90 | const rv = await hydrate(pkgs, (pkg: PackageRequirement, _dry: boolean) => { 91 | if (pkg.project === 'python.org') { 92 | return Promise.resolve([ 93 | { project: 'unicode.org', constraint: new semver.Range('^73') } 94 | ]) 95 | } else { 96 | return Promise.resolve([ 97 | { project: 'unicode.org', constraint: new semver.Range('^71') } 98 | ]) 99 | } 100 | }) 101 | 102 | const unicodes = rv.pkgs.filter(x => x.project === 'unicode.org') 103 | const constraints = new Set(unicodes.map(x => x.constraint.toString())) 104 | assertEquals(constraints.size, 2) 105 | assert(constraints.has("^71")) 106 | assert(constraints.has("^73")) 107 | }) 108 | 109 | it("hydrates.cannot-intersect", async function() { 110 | const pkgs = [ 111 | { project: 'npmjs.com', constraint: new semver.Range('*') }, 112 | { project: 'python.org', constraint: new semver.Range('~3.9') } 113 | ] 114 | 115 | const rv = hydrate(pkgs, (pkg: PackageRequirement, _dry: boolean) => { 116 | if (pkg.project === 'python.org') { 117 | return Promise.resolve([ 118 | { project: 'nodejs.com', constraint: new semver.Range('^73') } 119 | ]) 120 | } else { 121 | return Promise.resolve([ 122 | { project: 'nodejs.com', constraint: new semver.Range('^71') } 123 | ]) 124 | } 125 | }) 126 | 127 | await 
assertRejects(() => rv) 128 | }) 129 | }) 130 | -------------------------------------------------------------------------------- /src/plumbing/which.ts: -------------------------------------------------------------------------------- 1 | import { provides as cache_provides, available as cache_available } from "../hooks/useSyncCache.ts" 2 | import usePantry, { PantryError } from "../hooks/usePantry.ts" 3 | import { PackageRequirement } from "../types.ts" 4 | import * as semver from "../utils/semver.ts" 5 | 6 | export type WhichResult = PackageRequirement & { 7 | shebang: string[] 8 | } 9 | 10 | export default async function which(arg0: string, opts?: { providers?: boolean }): Promise; 11 | export default async function which(arg0: string, opts: { providers?: boolean, all: false }): Promise; 12 | export default async function which(arg0: string, opts: { providers?: boolean, all: true }): Promise; 13 | export default async function which(arg0: string, opts_?: { providers?: boolean, all?: boolean }) { 14 | 15 | const opts = { providers: opts_?.providers ?? true, all: opts_?.all ?? 
false } 16 | 17 | const rv: WhichResult[] = [] 18 | for await (const result of _which(arg0, opts)) { 19 | if (opts.all) { 20 | rv.push(result) 21 | } else { 22 | return result 23 | } 24 | } 25 | if (!opts.all && rv.length == 0) { 26 | return 27 | } else { 28 | return rv 29 | } 30 | } 31 | 32 | async function *_which(arg0: string, opts: { providers: boolean }): AsyncGenerator { 33 | arg0 = arg0.trim() 34 | /// sanitize and reject anything with path components 35 | if (!arg0 || arg0.includes("/")) return 36 | 37 | const pantry = usePantry() 38 | let found: WhichResult[] = [] 39 | 40 | // don't use the cache if PKGX_PANTRY_PATH is set 41 | if (cache_available()) { 42 | const cached = await cache_provides(arg0) 43 | if (cached) { 44 | for (const project of cached) { 45 | yield { project, constraint: new semver.Range("*"), shebang: [arg0] } 46 | } 47 | // NOTE probs wrong, but we need a rewrite 48 | if (cached.length) return 49 | } 50 | } 51 | 52 | const promises: Promise[] = [] 53 | 54 | for await (const entry of pantry.ls()) { 55 | if (found.length) { 56 | for (const f of found) yield f 57 | found = [] 58 | } 59 | const p = pantry.project(entry).provides().then(providers => { 60 | for (const provider of providers) { 61 | if (provider == arg0) { 62 | const constraint = new semver.Range("*") 63 | found.push({...entry, constraint, shebang: [provider] }) 64 | } else if (arg0.startsWith(provider)) { 65 | // eg. `node^16` symlink 66 | try { 67 | const constraint = new semver.Range(arg0.substring(provider.length)) 68 | found.push({...entry, constraint, shebang: [provider] }) 69 | } catch { 70 | // not a valid semver range; fallthrough 71 | } 72 | } else { 73 | //TODO more efficient to check the prefix fits arg0 first 74 | // eg. if python3 then check if the provides starts with python before 75 | // doing all the regex shit. 
Matters because there's a *lot* of YAMLs 76 | 77 | let rx = /({{\s*version\.(marketing|major)\s*}})/ 78 | let match = provider.match(rx) 79 | if (!match?.index) continue 80 | const regx = match[2] == 'major' ? '\\d+' : '\\d+\\.\\d+' 81 | const foo = subst(match.index, match.index + match[1].length, provider, `(${regx})`) 82 | rx = new RegExp(`^${foo}$`) 83 | match = arg0.match(rx) 84 | if (match) { 85 | const constraint = new semver.Range(`~${match[1]}`) 86 | found.push({...entry, constraint, shebang: [arg0] }) 87 | } 88 | } 89 | } 90 | }).swallow(PantryError) 91 | 92 | promises.push(p) 93 | 94 | if (opts.providers) { 95 | const pp = pantry.project(entry).provider().then(f => { 96 | if (!f) return 97 | const rv = f(arg0) 98 | if (rv) found.push({ 99 | ...entry, 100 | constraint: new semver.Range('*'), 101 | shebang: [...rv, arg0] 102 | }) 103 | }) 104 | promises.push(pp) 105 | } 106 | } 107 | 108 | await Promise.all(promises) 109 | 110 | // if we didn’t find anything yet then we have to wait on the promises 111 | // otherwise we can ignore them 112 | 113 | for (const f of found) { 114 | yield f 115 | } 116 | } 117 | 118 | const subst = function(start: number, end: number, input: string, what: string) { 119 | return input.substring(0, start) + what + input.substring(end) 120 | } 121 | -------------------------------------------------------------------------------- /src/hooks/useDownload.ts: -------------------------------------------------------------------------------- 1 | import { deno } from "../deps.ts" 2 | const { crypto: crypto_, streams: { writeAll } } = deno 3 | const { crypto } = crypto_ 4 | import { encodeHex } from "jsr:@std/encoding@1" 5 | import { PkgxError, panic } from "../utils/error.ts" 6 | import useConfig from "./useConfig.ts" 7 | import useFetch from "./useFetch.ts" 8 | import Path from "../utils/Path.ts" 9 | import * as fs from "node:fs" 10 | import "../utils/misc.ts" 11 | 12 | interface DownloadOptions { 13 | src: URL 14 | dst?: Path 15 | 
headers?: Record 16 | logger?: (info: {src: URL, dst: Path, rcvd?: number, total?: number }) => void 17 | } 18 | 19 | export class DownloadError extends PkgxError { 20 | status: number 21 | src: URL 22 | headers?: Record 23 | 24 | constructor(status: number, opts: { src: URL, headers?: Record}) { 25 | super(`http: ${status}: ${opts.src}`) 26 | this.name = 'DownloadError' 27 | this.status = status 28 | this.src = opts.src 29 | this.headers = opts.headers 30 | } 31 | } 32 | 33 | const tmpname = (dst: Path) => dst.parent().join(dst.basename() + ".incomplete") 34 | 35 | async function download(opts: DownloadOptions, chunk?: (blob: Uint8Array) => Promise): Promise { 36 | const [dst, stream] = await the_meat(opts) 37 | 38 | if (stream || chunk) { 39 | const reader = stream ?? fs.createReadStream(dst.string) 40 | 41 | const writer = await (() => { 42 | if (stream) { 43 | dst.parent().mkdir('p') 44 | return Deno.open(tmpname(dst).string, {write: true, create: true, truncate: true}) 45 | } 46 | })() 47 | 48 | for await (const blob of reader) { 49 | const pp: Promise[] = [] 50 | if (writer) pp.push(writeAll(writer, blob)) 51 | if (chunk) pp.push(chunk(blob)) 52 | await Promise.all(pp) 53 | } 54 | 55 | if (reader instanceof fs.ReadStream) { 56 | reader.close() 57 | } 58 | if (writer) { 59 | writer.close() 60 | tmpname(dst).mv({ to: dst, force: true }) 61 | } 62 | } 63 | 64 | return dst 65 | } 66 | 67 | function cache({ for: url }: {for: URL}): Path { 68 | return useConfig().cache 69 | .join(url.protocol.slice(0, -1)) 70 | .join(url.hostname) 71 | .join(hash()) 72 | .mkdir('p') 73 | 74 | function hash() { 75 | let key = url.pathname 76 | if (url.search) key += `?${url.search}` 77 | const blob = new TextEncoder().encode(key) 78 | const hash = crypto.subtle.digestSync("SHA-256", blob) 79 | return encodeHex(hash) 80 | } 81 | } 82 | 83 | export default function useDownload() { 84 | return { 85 | download, 86 | cache 87 | } 88 | } 89 | 90 | 91 | /// internal 92 | 93 | async 
function the_meat({ src, logger, headers, dst }: DownloadOptions): Promise<[Path, ReadableStream | undefined, number | undefined]> 94 | { 95 | const hash = cache({ for: src }) 96 | const mtime_entry = hash.join("mtime") 97 | const etag_entry = hash.join("etag") 98 | 99 | dst ??= hash.join(new Path(src.pathname).basename()) 100 | 101 | if (logger) logger({ src, dst }) 102 | 103 | if (dst.isReadableFile()) { 104 | headers ??= {} 105 | if (etag_entry.isFile()) { 106 | headers["If-None-Match"] = await etag_entry.read() 107 | } 108 | // sending both if we have them is ChatGPT recommended 109 | // also this fixes getting the mysql.com sources, otherwise it redownloads 400MB every time! 110 | if (mtime_entry.isFile()) { 111 | headers["If-Modified-Since"] = await mtime_entry.read() 112 | } 113 | } 114 | 115 | const rsp = await useFetch(src, { headers }) 116 | 117 | switch (rsp.status) { 118 | case 200: { 119 | const sz = parseInt(rsp.headers.get("Content-Length")!).chuzzle() 120 | 121 | if (logger) logger({ src, dst, total: sz }) 122 | 123 | const reader = rsp.body ?? 
panic() 124 | 125 | const text = rsp.headers.get("Last-Modified") 126 | if (text) mtime_entry.write({text, force: true}) 127 | const etag = rsp.headers.get("ETag") 128 | if (etag) etag_entry.write({text: etag, force: true}) 129 | 130 | if (!logger) { 131 | return [dst, reader, sz] 132 | } else { 133 | let n = 0 134 | return [dst, reader.pipeThrough(new TransformStream({ 135 | transform: (buf, controller) => { 136 | n += buf.length 137 | logger({ src, dst: dst!, rcvd: n, total: sz }) 138 | controller.enqueue(buf) 139 | }})), sz] 140 | } 141 | } 142 | case 304: { 143 | const sz = (await Deno.stat(dst.string)).size 144 | if (logger) logger({ src, dst, rcvd: sz, total: sz }) 145 | return [dst, undefined, sz] 146 | } 147 | default: 148 | throw new DownloadError(rsp.status, { src, headers }) 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /src/plumbing/install.ts: -------------------------------------------------------------------------------- 1 | import { Package, Installation, StowageNativeBottle } from "../types.ts" 2 | import useOffLicense from "../hooks/useOffLicense.ts" 3 | import useDownload from "../hooks/useDownload.ts" 4 | import { flock } from "../utils/flock.ts" 5 | import useConfig from "../hooks/useConfig.ts" 6 | import useCellar from "../hooks/useCellar.ts" 7 | import useCache from "../hooks/useCache.ts" 8 | import useFetch from "../hooks/useFetch.ts" 9 | import { createHash } from "node:crypto" 10 | import Path from "../utils/Path.ts" 11 | 12 | export default async function install(pkg: Package, logger?: Logger): Promise { 13 | const { project, version } = pkg 14 | 15 | const cellar = useCellar() 16 | const { prefix: PKGX_DIR, options: { compression } } = useConfig() 17 | const stowage = StowageNativeBottle({ pkg: { project, version }, compression }) 18 | const url = useOffLicense('s3').url(stowage) 19 | const tarball = useCache().path(stowage) 20 | const shelf = PKGX_DIR.join(pkg.project) 21 | 22 | 
logger?.locking?.(pkg) 23 | 24 | const unflock = await flock(shelf.mkdir('p')) 25 | 26 | try { 27 | const already_installed = await cellar.has(pkg) 28 | if (already_installed) { 29 | // some other pkgx instance installed us while we were waiting for the lock 30 | // or potentially we were already installed and the caller is naughty 31 | logger?.installed?.(already_installed) 32 | return already_installed 33 | } 34 | 35 | logger?.downloading?.({pkg}) 36 | 37 | const PATH = Deno.build.os == 'windows' ? "C:\\windows\\system32" : "/usr/bin:/bin" 38 | 39 | const tmpdir = Path.mktemp({ 40 | //TODO dir should not be here ofc 41 | dir: PKGX_DIR.join(".local/tmp").join(pkg.project), 42 | prefix: `v${pkg.version}.` 43 | //NOTE ^^ inside pkgx prefix to avoid TMPDIR is on a different volume problems 44 | }) 45 | const tar_args = compression == 'xz' ? 'xJf' : 'xzf' // laughably confusing 46 | const untar = new Deno.Command("tar", { 47 | args: [tar_args, "-", "--strip-components", (pkg.project.split("/").length + 1).toString()], 48 | stdin: 'piped', stdout: "inherit", stderr: "inherit", 49 | cwd: tmpdir.string, 50 | /// hard coding path to ensure we don’t deadlock trying to use ourselves to untar ourselves 51 | env: { PATH } 52 | }).spawn() 53 | const hasher = createHash("sha256") 54 | const remote_SHA_promise = remote_SHA(new URL(`${url}.sha256sum`)) 55 | const writer = untar.stdin.getWriter() 56 | 57 | let total: number | undefined 58 | let n = 0 59 | await useDownload().download({ 60 | src: url, 61 | dst: tarball, 62 | logger: info => { 63 | logger?.downloading?.({ pkg, ...info }) 64 | total ??= info.total 65 | } 66 | }, blob => { 67 | n += blob.length 68 | hasher.update(blob) 69 | logger?.installing?.({ pkg, progress: total ? 
n / total : total }) 70 | return writer.write(blob) 71 | }) 72 | 73 | writer.close() 74 | 75 | const untar_exit_status = await untar.status 76 | if (!untar_exit_status.success) { 77 | throw new Error(`tar exited with status ${untar_exit_status.code}`) 78 | } 79 | 80 | const computed_hash_value = hasher.digest("hex") 81 | const checksum = await remote_SHA_promise 82 | 83 | if (computed_hash_value != checksum) { 84 | tarball.rm() 85 | console.error("pkgx: we deleted the invalid tarball. try again?") 86 | throw new Error(`sha: expected: ${checksum}, got: ${computed_hash_value}`) 87 | } 88 | 89 | const path = tmpdir.mv({ to: shelf.join(`v${pkg.version}`) }).chmod(0o755) 90 | const install = { pkg, path } 91 | 92 | logger?.installed?.(install) 93 | 94 | return install 95 | } catch (err) { 96 | tarball.rm() //FIXME resumable downloads! 97 | throw err 98 | } finally { 99 | logger?.unlocking?.(pkg) 100 | await unflock() 101 | } 102 | } 103 | 104 | async function remote_SHA(url: URL) { 105 | const rsp = await useFetch(url) 106 | if (!rsp.ok) throw rsp 107 | const txt = await rsp.text() 108 | return txt.split(' ')[0] 109 | } 110 | 111 | 112 | export interface Logger { 113 | locking?(pkg: Package): void 114 | /// raw http info 115 | downloading?(info: {pkg: Package, src?: URL, dst?: Path, rcvd?: number, total?: number}): void 116 | /// we are simultaneously downloading and untarring the bottle 117 | /// the install progress here is proper and tied to download progress 118 | /// progress is a either a fraction between 0 and 1 or the number of bytes that have been untarred 119 | /// we try to give you the fraction as soon as possible, but you will need to deal with both formats 120 | installing?(info: {pkg: Package, progress: number | undefined}): void 121 | unlocking?(pkg: Package): void 122 | installed?(installation: Installation): void 123 | } 124 | 125 | // deno-lint-ignore no-explicit-any 126 | export function ConsoleLogger(prefix?: any): Logger { 127 | prefix = prefix ? 
`${prefix}: ` : "" 128 | return { 129 | locking: function() { console.error(`${prefix}locking`, ...arguments) }, 130 | downloading: function() { console.error(`${prefix}downloading`, ...arguments) }, 131 | installing: function() { console.error(`${prefix}installing`, ...arguments) }, 132 | unlocking: function() { console.error(`${prefix}unlocking`, ...arguments) }, 133 | installed: function() { console.error(`${prefix}installed`, ...arguments) } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/hooks/useConfig.ts: -------------------------------------------------------------------------------- 1 | import { flatmap } from "../utils/misc.ts" 2 | import { deno } from "../deps.ts" 3 | import host from "../utils/host.ts" 4 | import Path from "../utils/Path.ts" 5 | 6 | export interface Config { 7 | prefix: Path 8 | pantries: Path[] 9 | cache: Path 10 | data: Path 11 | 12 | dist: string 13 | 14 | options: { 15 | /// prefer xz or gz for bottle downloads 16 | compression: 'xz' | 'gz' 17 | } 18 | 19 | UserAgent?: string 20 | 21 | git?: Path 22 | } 23 | 24 | function platform_cache_default(home: Path, { LOCALAPPDATA }: { LOCALAPPDATA?: string }) { 25 | switch (Deno.build.os) { 26 | case 'darwin': 27 | return home.join('Library/Caches') 28 | case 'windows': 29 | return flatmap(LOCALAPPDATA, Path.abs) ?? home.join('AppData/Local') 30 | default: 31 | return home.join('.cache') 32 | } 33 | } 34 | 35 | function platform_data_home_default(home: Path, { LOCALAPPDATA }: { LOCALAPPDATA?: string }) { 36 | switch (host().platform) { 37 | case 'darwin': 38 | return home.join("Library/Application Support") 39 | case 'windows': { 40 | if (LOCALAPPDATA) { 41 | return new Path(LOCALAPPDATA) 42 | } else { 43 | return home.join("AppData/Local") 44 | }} 45 | default: 46 | return home.join(".local/share") 47 | } 48 | } 49 | 50 | const SEP = Deno.build.os == 'windows' ? 
';' : ':' 51 | 52 | export function ConfigDefault(env = Deno.env.toObject()): Config { 53 | const home = flatmap(env['PKGX_HOME'], x => new Path(x)) ?? Path.home() 54 | const prefix = flatmap(env['PKGX_DIR']?.trim(), x => new Path(x)) ?? 55 | flatmap(env['XDG_DATA_HOME'], x => new Path(x).join("pkgx")) ?? 56 | home.join('.pkgx') 57 | const pantries = env['PKGX_PANTRY_PATH']?.split(SEP).compact(x => flatmap(x.trim(), x => Path.abs(x) ?? Path.cwd().join(x))) ?? [] 58 | const cache = ( 59 | (Deno.build.os == 'linux' ? flatmap(env["XDG_CACHE_HOME"], Path.abs) : undefined) 60 | ?? platform_cache_default(home, env) 61 | ).join("pkgx") 62 | const data = ( 63 | (Deno.build.os == 'linux' ? flatmap(env["XDG_DATA_HOME"], Path.abs) : undefined) 64 | ?? platform_data_home_default(home, env) 65 | ).join("pkgx") 66 | const dist = env['PKGX_DIST_URL']?.trim() ?? 'https://dist.pkgx.dev' 67 | const isCI = boolize(env['CI']) ?? false 68 | const UserAgent = flatmap(getv(), v => `libpkgx/${v}`) ?? 'libpkgx' 69 | //TODO prefer 'xz' on Linux (as well) if supported 70 | const compression = !isCI && host().platform == 'darwin' ? 'xz' : 'gz' 71 | 72 | return { 73 | prefix, 74 | pantries, 75 | cache, 76 | data, 77 | dist, 78 | UserAgent, 79 | options: { 80 | compression, 81 | }, 82 | git: git(prefix, env.PATH) 83 | } 84 | } 85 | 86 | function getv(): string | undefined { 87 | if (typeof Deno === 'undefined') { 88 | const path = new Path(deno.fromFileUrl(import.meta.url)).parent().parent().parent().join("package.json") 89 | const blob = Deno.readFileSync(path.string) 90 | const txt = new TextDecoder().decode(blob) 91 | const { version } = JSON.parse(txt) 92 | return typeof version == 'string' ? 
version : undefined 93 | } 94 | } 95 | 96 | const gt = globalThis as unknown as {sh_pkgx_config?: Config} 97 | 98 | export default function useConfig(input?: Config): Config { 99 | // storing on globalThis so our config is shared across 100 | // potentially multiple versions of libpkgx being loaded in the same process 101 | if (!gt.sh_pkgx_config || input) { 102 | gt.sh_pkgx_config = input ?? ConfigDefault() 103 | } 104 | return {...gt.sh_pkgx_config} // copy to prevent mutation 105 | } 106 | 107 | function boolize(input: string | undefined): boolean | undefined { 108 | switch (input?.trim()?.toLowerCase()) { 109 | case '0': 110 | case 'false': 111 | case 'no': 112 | return false 113 | case '1': 114 | case 'true': 115 | case 'yes': 116 | return true 117 | } 118 | } 119 | 120 | function initialized() { 121 | return gt.sh_pkgx_config !== undefined 122 | } 123 | 124 | export const _internals = { initialized, boolize } 125 | 126 | 127 | /// we support a pkgx installed or system installed git, nothing else 128 | /// eg. `git` could be a symlink in `PATH` to pkgx, which would cause a fork bomb 129 | /// on darwin if xcode or xcode/clt is not installed this will fail to our http fallback above 130 | //TODO be able to use our own git if installed 131 | //NOTE however we don’t want to have to fully hydrate its env when libpkgx is initialized only when needed so… 132 | function git(_prefix: Path, PATH?: string): Path | undefined { 133 | return usr() 134 | 135 | function usr() { 136 | // only return /usr/bin if in the PATH so user can explicitly override this 137 | const rv = PATH?.split(":")?.includes("/usr/bin") ? new Path("/usr") : undefined 138 | 139 | return (() => { 140 | /// don’t cause macOS to abort and then prompt the user to install the XcodeCLT 141 | //FIXME test! but this is hard to test without docker images or something! 
142 | switch (host().platform) { 143 | case 'darwin': 144 | if (new Path("/Library/Developer/CommandLineTools/usr/bin/git").isExecutableFile()) return rv 145 | if (new Path("/Applications/Xcode.app").isDirectory()) return rv 146 | return // probably won’t work without prompting the user to install the XcodeCLT 147 | case "linux": 148 | return rv 149 | case "windows": 150 | if (PATH) { 151 | //FIXME this is GitHub Actions specific 152 | return new Path('C:\Program Files\Git\cmd\git.exe') 153 | } 154 | } 155 | })()?.join("bin/git") 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /src/porcelain/run.ts: -------------------------------------------------------------------------------- 1 | import install, { Logger } from "../plumbing/install.ts" 2 | import useShellEnv from '../hooks/useShellEnv.ts' 3 | import usePantry from '../hooks/usePantry.ts' 4 | import hydrate from "../plumbing/hydrate.ts" 5 | import resolve from "../plumbing/resolve.ts" 6 | import { PkgxError } from "../utils/error.ts" 7 | import { spawn } from "node:child_process" 8 | import useSync from "../hooks/useSync.ts" 9 | import which from "../plumbing/which.ts" 10 | import link from "../plumbing/link.ts" 11 | import { is_what } from "../deps.ts" 12 | import Path from "../utils/Path.ts" 13 | const { isArray } = is_what 14 | 15 | interface OptsEx { 16 | env?: Record 17 | logger?: Logger 18 | } 19 | 20 | type Options = { 21 | stdout?: boolean 22 | stderr?: boolean 23 | status?: boolean 24 | } & OptsEx 25 | 26 | type Cmd = string | (string | Path)[] 27 | 28 | /// if you pass a single string we call that string via /bin/sh 29 | /// if you don’t want that pass an array of args 30 | export default async function run(cmd: Cmd, opts?: OptsEx): Promise; 31 | export default async function run(cmd: Cmd, opts: {stdout: true} & OptsEx): Promise<{ stdout: string }>; 32 | export default async function run(cmd: Cmd, opts: {stderr: true} & OptsEx): Promise<{ stderr: 
string }>;
export default async function run(cmd: Cmd, opts: {status: true} & OptsEx): Promise<{ status: number }>;
export default async function run(cmd: Cmd, opts: {stdout: true, stderr: true} & OptsEx): Promise<{ stdout: string, stderr: string }>;
export default async function run(cmd: Cmd, opts: {stdout: true, status: true} & OptsEx): Promise<{ stdout: string, status: number }>;
export default async function run(cmd: Cmd, opts: {stderr: true, status: true} & OptsEx): Promise<{ stderr: string, status: number }>;
export default async function run(cmd: Cmd, opts: {stdout: true, stderr: true, status: true } & OptsEx): Promise<{ stdout: string, stderr: string, status: number }>;
export default async function run(cmd: Cmd, opts?: Options): Promise<unknown> {

  // normalize `cmd` into a program name plus args, deciding whether the
  // invocation needs to go via `/bin/sh`
  const { usesh, arg0: whom } = (() => {
    if (!isArray(cmd)) {
      const s = cmd.trim()
      const i = s.indexOf(' ')
      if (i == -1) {
        cmd = []
        return { usesh: false, arg0: s }
      } else if (Deno.build.os == 'windows') {
        //NOTE(review): naive whitespace split — quoted arguments are not
        // preserved on windows; confirm callers don’t rely on quoting here
        cmd = cmd.split(/\s+/)
        const arg0 = cmd.shift()! as string
        return { usesh: false, arg0 }
      } else {
        const arg0 = s.slice(0, i)
        cmd = s.slice(i + 1)
        return { usesh: true, arg0 }
      }
    } else if (cmd.length == 0) {
      throw new RunError('EUSAGE', `\`cmd\` evaluated empty: ${cmd}`)
    } else {
      return {
        usesh: false,
        arg0: cmd.shift()!.toString().trim()
      }
    }
  })()

  const { env, shebang } = await setup(whom, opts?.env ?? Deno.env.toObject(), opts?.logger)
  const arg0 = usesh ? '/bin/sh' : shebang.shift()!
  const args = usesh
    ? ['-c', `${shebang.join(' ')} ${cmd}`]
    : [...shebang, ...(cmd as (string | Path)[]).map(x => x.toString())]

  return new Promise((resolve, reject) => {
    const proc = spawn(arg0, args, {
      env,
      stdio: [
        "pipe",
        opts?.stdout ? 'pipe' : 'inherit',
        opts?.stderr ? 'pipe' : 'inherit'
      ],
      /// on windows .bat files are not executable unless invoked via a shell
      /// our provides database deliberately excludes `.bat` so that the same
      /// filename is used for all platforms, this works since provided we use
      /// a shell to execute, we don’t need to know the extension
      shell: Deno.build.os == 'windows'
    })

    let stdout = '', stderr = ''
    proc.stdout?.on('data', data => stdout += data)
    proc.stderr?.on('data', data => stderr += data)
    proc.on('close', status => {
      if (status && !opts?.status) {
        // non-zero exit and the caller didn’t opt into receiving the status
        const err = new RunError('EIO', `${cmd} exited with: ${status}`)
        err.cause = status
        reject(err)
      } else {
        const fulfill = resolve as (value: unknown) => void
        fulfill({ stdout, stderr, status })
      }
    })
  })
}

/// Ensures the providing package (and its deps) are installed and linked, then
/// returns the merged environment plus the shebang to prefix onto the args.
async function setup(cmd: string, env: Record<string, string | undefined>, logger: Logger | undefined) {
  const pantry = usePantry()
  const sh = useShellEnv()
  const { install, link } = _internals

  if (pantry.missing()) {
    await useSync()
  }

  const wut = await which(cmd)
  if (!wut) throw new RunError('ENOENT', `No project in pantry provides ${cmd}`)

  const { pkgs } = await hydrate(wut)
  const { pending, installed } = await resolve(pkgs)
  for (const pkg of pending) {
    const installation = await install(pkg, logger)
    await link(installation)
    installed.push(installation)
  }

  const pkgenv = await sh.map({ installations: installed })

  // merge the caller’s env on top of the package env
  for (const [key, value] of Object.entries(env)) {
    if (!value) {
      continue
    } else if (pkgenv[key]) {
      pkgenv[key].push(value)
    } else {
      pkgenv[key] = [value]
    }
  }

  return { env: sh.flatten(pkgenv), shebang: wut.shebang }
}


type RunErrorCode = 'ENOENT' | 'EUSAGE' | 'EIO'

export class RunError extends PkgxError {
  code: RunErrorCode

  constructor(code: RunErrorCode, message: string) {
    super(message)
    this.code = code
  }
}

// indirection point so tests can stub install/link
const _internals = {
  install,
  link
}
--------------------------------------------------------------------------------
/src/hooks/useShellEnv.ts:
--------------------------------------------------------------------------------
import { Installation } from "../types.ts"
import usePantry from "./usePantry.ts"
import host from "../utils/host.ts"

export const EnvKeys = [
  'PATH',
  'MANPATH',
  'PKG_CONFIG_PATH',
  'LIBRARY_PATH',
  'LD_LIBRARY_PATH',
  'CPATH',
  'XDG_DATA_DIRS',
  'CMAKE_PREFIX_PATH',
  'DYLD_FALLBACK_LIBRARY_PATH',
  'SSL_CERT_FILE',
  'LDFLAGS',
  'PKGX_DIR',
  'ACLOCAL_PATH'
] as const
export type EnvKey = typeof EnvKeys[number]

interface Options {
  installations: Installation[]
}

export default function() {
  return {
    map,
    expand,
    flatten
  }
}

/// returns an environment that supports the provided packages
async function map({installations}: Options): Promise<Record<string, string[]>> {
  const vars: Partial<Record<EnvKey, OrderedSet<string>>> = {}
  const isMac = host().platform == 'darwin'

  const projects = new Set(installations.map(x => x.pkg.project))
  const has_cmake = projects.has('cmake.org')
  const archaic = true

  const rv: Record<string, string[]> = {}
  const seen = new Set<string>()

  for (const installation of installations) {

    if (!seen.insert(installation.pkg.project).inserted) {
      console.warn("pkgx: env is being duped:", installation.pkg.project)
    }

    for (const key of EnvKeys) {
      for (const suffix of suffixes(key)!)
{ 54 | vars[key] = compact_add(vars[key], installation.path.join(suffix).chuzzle()?.string) 55 | } 56 | } 57 | 58 | if (archaic) { 59 | vars.LIBRARY_PATH = compact_add(vars.LIBRARY_PATH, installation.path.join("lib").chuzzle()?.string) 60 | vars.CPATH = compact_add(vars.CPATH, installation.path.join("include").chuzzle()?.string) 61 | } 62 | 63 | if (has_cmake) { 64 | vars.CMAKE_PREFIX_PATH = compact_add(vars.CMAKE_PREFIX_PATH, installation.path.string) 65 | } 66 | 67 | if (projects.has('gnu.org/autoconf')) { 68 | vars.ACLOCAL_PATH = compact_add(vars.ACLOCAL_PATH, installation.path.join("share/aclocal").chuzzle()?.string) 69 | } 70 | 71 | if (installation.pkg.project === 'openssl.org') { 72 | const certPath = installation.path.join("ssl/cert.pem").chuzzle()?.string 73 | // this is a single file, so we assume a 74 | // valid entry is correct 75 | if (certPath) { 76 | vars.SSL_CERT_FILE = new OrderedSet() 77 | vars.SSL_CERT_FILE.add(certPath) 78 | } 79 | } 80 | 81 | // pantry configured runtime environment 82 | const runtime = await usePantry().project(installation.pkg).runtime.env(installation.pkg.version, installations) 83 | for (const key in runtime) { 84 | rv[key] ??= [] 85 | rv[key].push(runtime[key]) 86 | } 87 | } 88 | 89 | // this is how we use precise versions of libraries 90 | // for your virtual environment 91 | //FIXME SIP on macOS prevents DYLD_FALLBACK_LIBRARY_PATH from propagating to grandchild processes 92 | if (vars.LIBRARY_PATH) { 93 | vars.LD_LIBRARY_PATH = vars.LIBRARY_PATH 94 | if (isMac) { 95 | // non FALLBACK variety causes strange issues in edge cases 96 | // where our symbols somehow override symbols from the macOS system 97 | vars.DYLD_FALLBACK_LIBRARY_PATH = vars.LIBRARY_PATH 98 | } 99 | } 100 | 101 | for (const key of EnvKeys) { 102 | //FIXME where is this `undefined` __happening__? 
103 | if (vars[key] === undefined || vars[key]!.isEmpty()) continue 104 | rv[key] = vars[key]!.toArray() 105 | } 106 | 107 | // don’t break `man` lol 108 | rv["MANPATH"]?.push("/usr/share/man") 109 | // https://github.com/pkgxdev/libpkgx/issues/70 110 | rv['XDG_DATA_DIRS']?.push('/usr/local/share:/usr/share') 111 | 112 | return rv 113 | } 114 | 115 | function suffixes(key: EnvKey) { 116 | switch (key) { 117 | case 'PATH': 118 | return ["bin", "sbin"] 119 | case 'MANPATH': 120 | return ["man", "share/man"] 121 | case 'PKG_CONFIG_PATH': 122 | return ['share/pkgconfig', 'lib/pkgconfig'] 123 | case 'XDG_DATA_DIRS': 124 | return ['share'] 125 | case 'LIBRARY_PATH': 126 | case 'LD_LIBRARY_PATH': 127 | case 'DYLD_FALLBACK_LIBRARY_PATH': 128 | case 'CPATH': 129 | case 'CMAKE_PREFIX_PATH': 130 | case 'SSL_CERT_FILE': 131 | case 'LDFLAGS': 132 | case 'PKGX_DIR': 133 | case 'ACLOCAL_PATH': 134 | return [] // we handle these specially 135 | default: { 136 | const exhaustiveness_check: never = key 137 | throw new Error(`unhandled id: ${exhaustiveness_check}`) 138 | }} 139 | } 140 | 141 | export function expand(env: Record) { 142 | let rv = '' 143 | for (const [key, value] of Object.entries(env)) { 144 | if (value.length == 0) continue 145 | rv += `export ${key}="${value.join(":")}"\n` 146 | } 147 | return rv 148 | } 149 | 150 | export function flatten(env: Record) { 151 | const SEP = Deno.build.os == 'windows' ? 
';' : ':' 152 | const rv: Record = {} 153 | for (const [key, value] of Object.entries(env)) { 154 | rv[key] = value.join(SEP) 155 | } 156 | return rv 157 | } 158 | 159 | function compact_add(set: OrderedSet | undefined, item: T | null | undefined): OrderedSet { 160 | if (!set) set = new OrderedSet() 161 | if (item) set.add(item) 162 | 163 | return set 164 | } 165 | 166 | class OrderedSet { 167 | private items: T[]; 168 | private set: Set; 169 | 170 | constructor() { 171 | this.items = []; 172 | this.set = new Set(); 173 | } 174 | 175 | add(item: T): void { 176 | if (!this.set.has(item)) { 177 | this.items.push(item); 178 | this.set.add(item); 179 | } 180 | } 181 | 182 | toArray(): T[] { 183 | return [...this.items]; 184 | } 185 | 186 | isEmpty(): boolean { 187 | return this.items.length == 0 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /src/plumbing/hydrate.ts: -------------------------------------------------------------------------------- 1 | import { PackageRequirement, Package } from "../types.ts" 2 | import * as semver from "../utils/semver.ts" 3 | import usePantry from "../hooks/usePantry.ts" 4 | import { is_what } from "../deps.ts" 5 | const { isArray } = is_what 6 | 7 | 8 | //TODO linktime cyclic dependencies cannot be allowed 9 | //NOTE however if they aren’t link time it's presumably ok in some scenarios 10 | // eg a tool that lists a directory may depend on a tool that identifies the 11 | // mime types of files which could depend on the listing tool 12 | //FIXME actually we are not refining the constraints currently 13 | //TODO we are not actually restricting subsequent asks, eg. deno^1 but then deno^1.2 14 | 15 | 16 | interface ReturnValue { 17 | /// full list topologically sorted (ie dry + wet) 18 | pkgs: PackageRequirement[] 19 | 20 | /// your input, but version constraints refined based on the whole graph 21 | /// eg. 
you hydrate the graph for a and b, but b depends on a tighter range of a than you input 22 | dry: PackageRequirement[] 23 | 24 | /// packages that were not supplied to input or that require bootstrap 25 | wet: PackageRequirement[] 26 | 27 | /// the graph cycles at these packages 28 | /// this is only a problem if you need to build one of these, 29 | // in which case TADA! here's the list! 30 | bootstrap_required: Set 31 | } 32 | 33 | const get = (x: PackageRequirement) => usePantry().project(x).runtime.deps() 34 | 35 | /// sorts a list of packages topologically based on their 36 | /// dependencies. Throws if there is a cycle in the input. 37 | /// ignores changes in dependencies based on versions 38 | export default async function hydrate( 39 | input: (PackageRequirement | Package)[] | (PackageRequirement | Package), 40 | get_deps: (pkg: PackageRequirement, dry: boolean) => Promise = get, 41 | ): Promise 42 | { 43 | if (!isArray(input)) input = [input] 44 | 45 | const dry = condense(input.map(spec => { 46 | if ("version" in spec) { 47 | return {project: spec.project, constraint: new semver.Range(`=${spec.version}`)} 48 | } else { 49 | return spec 50 | } 51 | })) 52 | 53 | const graph: Record = {} 54 | const bootstrap = new Set() 55 | const initial_set = new Set(dry.map(x => x.project)) 56 | const stack: Node[] = [] 57 | 58 | const additional_unicodes: semver.Range[] = [] 59 | 60 | // Starting the DFS loop for each package in the dry list 61 | for (const pkg of dry) { 62 | let new_node = graph[pkg.project] 63 | if (new_node) { 64 | // Intersect constraints for existing nodes 65 | new_node.pkg.constraint = semver.intersect(new_node.pkg.constraint, pkg.constraint) 66 | } else { 67 | new_node = new Node(pkg) 68 | graph[pkg.project] = new_node 69 | stack.push(new_node) 70 | } 71 | 72 | while (stack.length > 0) { 73 | const current_node = stack.pop()! 
74 | const children = current_node.children 75 | 76 | for (const dep of await get_deps(current_node.pkg, initial_set.has(current_node.project))) { 77 | if (children.has(dep.project)) { 78 | if (!bootstrap.has(dep.project)) { 79 | console.warn(`pkgx: cyclic dep: ${dep.project}: ${current_node.project}`) 80 | bootstrap.add(dep.project) 81 | } 82 | } else { 83 | let child_node = graph[dep.project] 84 | if (child_node) { 85 | try { 86 | // Intersect constraints 87 | child_node.pkg.constraint = semver.intersect(child_node.pkg.constraint, dep.constraint) 88 | } catch (e) { 89 | if (dep.project == 'unicode.org') { 90 | // we handle unicode.org for now to allow situations like: 91 | // https://github.com/pkgxdev/pantry/issues/4104 92 | // https://github.com/pkgxdev/pkgx/issues/899 93 | additional_unicodes.push(dep.constraint) 94 | } else { 95 | throw e 96 | } 97 | } 98 | } else { 99 | child_node = new Node(dep, current_node) 100 | graph[dep.project] = child_node 101 | stack.push(child_node) 102 | } 103 | current_node.children.add(dep.project) 104 | } 105 | } 106 | } 107 | } 108 | 109 | // Sorting and constructing the return value 110 | const pkgs = Object.values(graph) 111 | .sort((a, b) => b.count() - a.count()) 112 | .map(({pkg}) => pkg) 113 | 114 | // see above explanation 115 | pkgs.push(...additional_unicodes.map(constraint => ({ project: "unicode.org", constraint }))) 116 | 117 | //TODO strictly we need to record precisely the bootstrap version constraint 118 | const bootstrap_required = new Set(pkgs.compact(({project}) => bootstrap.has(project) && project)) 119 | 120 | return { 121 | pkgs, 122 | dry: pkgs.filter(({project}) => initial_set.has(project)), 123 | wet: pkgs.filter(({project}) => !initial_set.has(project) || bootstrap_required.has(project)), 124 | bootstrap_required 125 | } 126 | } 127 | 128 | function condense(pkgs: PackageRequirement[]) { 129 | const out: PackageRequirement[] = [] 130 | for (const pkg of pkgs) { 131 | const found = out.find(x => 
x.project === pkg.project) 132 | if (found) { 133 | found.constraint = semver.intersect(found.constraint, pkg.constraint) 134 | } else { 135 | out.push(pkg) 136 | } 137 | } 138 | return out 139 | } 140 | 141 | 142 | /////////////////////////////////////////////////////////////////////////// lib 143 | class Node { 144 | parent: Node | undefined 145 | readonly pkg: PackageRequirement 146 | readonly project: string 147 | children: Set = new Set() 148 | 149 | constructor(pkg: PackageRequirement, parent?: Node) { 150 | this.parent = parent 151 | this.pkg = pkg 152 | this.project = pkg.project 153 | } 154 | 155 | count(): number { 156 | let n = 0 157 | // deno-lint-ignore no-this-alias 158 | let node: Node | undefined = this 159 | while ((node = node?.parent)) n++ 160 | return n 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /src/hooks/useSyncCache.ts: -------------------------------------------------------------------------------- 1 | import { Database } from "../../vendor/sqlite3@0.10.0/mod.ts"; 2 | import * as pkgutils from "../utils/pkg.ts"; 3 | import usePantry from "./usePantry.ts"; 4 | import useConfig from "./useConfig.ts"; 5 | 6 | export default async function() { 7 | if (Deno.build.os == 'windows') return 8 | 9 | const path = useConfig().cache.join('pantry.db').rm() // delete it first so pantry instantiation doesn't use cache 10 | const { ls, ...pantry } = usePantry() 11 | 12 | const sqlite3 = (await install_sqlite())?.string 13 | if (!sqlite3) return 14 | const db = new Database(path.string, { sqlite3 }) 15 | 16 | // unique or don’t insert what is already there or just dump tables first perhaps 17 | 18 | try { 19 | // speeds up by using memory as much as possible 20 | db.exec(` 21 | PRAGMA synchronous = OFF; 22 | PRAGMA journal_mode = MEMORY; 23 | PRAGMA temp_store = MEMORY; 24 | `); 25 | 26 | await db.transaction(async () => { 27 | db.exec(` 28 | DROP TABLE IF EXISTS provides; 29 | DROP TABLE IF EXISTS 
dependencies; 30 | DROP TABLE IF EXISTS companions; 31 | DROP TABLE IF EXISTS runtime_env; 32 | CREATE TABLE provides ( 33 | project TEXT, 34 | program TEXT 35 | ); 36 | CREATE TABLE dependencies ( 37 | project TEXT, 38 | pkgspec TEXT 39 | ); 40 | CREATE TABLE companions ( 41 | project TEXT, 42 | pkgspec TEXT 43 | ); 44 | CREATE TABLE runtime_env ( 45 | project TEXT, 46 | envline TEXT 47 | ); 48 | CREATE INDEX idx_project ON provides(project); 49 | CREATE INDEX idx_program ON provides(program); 50 | CREATE INDEX idx_project_dependencies ON dependencies(project); 51 | CREATE INDEX idx_project_companions ON companions(project); 52 | `); 53 | 54 | for await (const pkg of ls()) { 55 | if (!pkg.path.string.startsWith(pantry.prefix.string)) { 56 | // don’t cache PKGX_PANTRY_PATH additions 57 | continue; 58 | } 59 | 60 | try { 61 | const project = pantry.project(pkg.project) 62 | const [programs, deps, companions, yaml] = await Promise.all([ 63 | project.provides(), 64 | project.runtime.deps(), 65 | project.companions(), 66 | project.yaml() 67 | ]) 68 | 69 | for (const program of programs) { 70 | db.exec(`INSERT INTO provides (project, program) VALUES ('${pkg.project}', '${program}');`); 71 | } 72 | 73 | for (const dep of deps) { 74 | db.exec(`INSERT INTO dependencies (project, pkgspec) VALUES ('${pkg.project}', '${pkgutils.str(dep)}')`); 75 | } 76 | 77 | for (const companion of companions) { 78 | db.exec(`INSERT INTO companions (project, pkgspec) VALUES ('${pkg.project}', '${pkgutils.str(companion)}')`); 79 | } 80 | 81 | for (const [key, value] of Object.entries(yaml.runtime?.env ?? 
{})) { 82 | db.exec(`INSERT INTO runtime_env (project, envline) VALUES ('${pkg.project}', '${key}=${value}')`); 83 | } 84 | } catch { 85 | console.warn("corrupt yaml", pkg.path) 86 | } 87 | } 88 | })() 89 | } catch (err) { 90 | path.rm() 91 | throw err 92 | } finally { 93 | db.close(); 94 | } 95 | } 96 | 97 | export async function provides(program: string) { 98 | const db = await _db() 99 | if (!db) return 100 | try { 101 | return db.sql`SELECT project FROM provides WHERE program = ${program}`.map(x => x.project); 102 | } finally { 103 | db.close() 104 | } 105 | } 106 | 107 | export async function dependencies(project: string) { 108 | const db = await _db() 109 | if (!db) return 110 | try { 111 | return db.sql`SELECT pkgspec FROM dependencies WHERE project = ${project}`.map(x => pkgutils.parse(x.pkgspec)); 112 | } finally { 113 | db.close() 114 | } 115 | } 116 | 117 | export async function completion(prefix: string) { 118 | const db = await _db() 119 | try { 120 | return db?.prepare(`SELECT program FROM provides WHERE program LIKE '${prefix}%'`).value<[string]>()!; 121 | } finally { 122 | db?.close() 123 | } 124 | } 125 | 126 | /// is the cache available? 
127 | export function available() { 128 | if (Deno.build.os == 'windows') { 129 | return false 130 | } else { 131 | const path = useConfig().cache.join('pantry.db') 132 | return path.isFile() 133 | } 134 | } 135 | 136 | export async function companions(project: string) { 137 | const db = await _db() 138 | if (!db) return 139 | try { 140 | return db.sql`SELECT pkgspec FROM companions WHERE project = ${project}`.map(x => pkgutils.parse(x.pkgspec)); 141 | } finally { 142 | db.close() 143 | } 144 | } 145 | 146 | export async function runtime_env(project: string) { 147 | const db = await _db() 148 | if (!db) return 149 | try { 150 | const rv: Record = {} 151 | for (const {envline: line} of db.sql`SELECT envline FROM runtime_env WHERE project = ${project}`) { 152 | const [key, ...rest] = line.split("=") 153 | rv[key] = rest.join('=') 154 | } 155 | return rv 156 | } finally { 157 | db.close() 158 | } 159 | } 160 | 161 | import useCellar from "./useCellar.ts" 162 | 163 | async function _db() { 164 | try { 165 | if (Deno.build.os == 'windows') return 166 | const path = useConfig().cache.join('pantry.db') 167 | if (!path.isFile()) return 168 | const sqlite = await useCellar().has({ project: "sqlite.org", constraint: new semver.Range('*') }) 169 | if (!sqlite) return 170 | const ext = host().platform == 'darwin' ? 'dylib' : 'so' 171 | return new Database(path.string, {readonly: true, sqlite3: sqlite.path.join(`lib/libsqlite3.${ext}`).string}) 172 | } catch { 173 | console.warn("couldn’t load pantry.db") 174 | return 175 | } 176 | } 177 | 178 | import install from "../porcelain/install.ts" 179 | import host from "../utils/host.ts"; 180 | import Path from "../utils/Path.ts"; 181 | import { semver } from "../../mod.ts"; 182 | 183 | async function install_sqlite(): Promise { 184 | const foo = await install("sqlite.org") 185 | for (const bar of foo) { 186 | if (bar.pkg.project == 'sqlite.org') { 187 | const ext = host().platform == 'darwin' ? 
'dylib' : 'so' 188 | return bar.path.join(`lib/libsqlite3.${ext}`) 189 | } 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /src/plumbing/resolve.test.ts: -------------------------------------------------------------------------------- 1 | // deno-lint-ignore-file require-await 2 | import { assert, assertEquals, fail, assertRejects } from "@std/assert" 3 | import { Installation, Package, PackageRequirement } from "../types.ts" 4 | import { useTestConfig } from "../hooks/useTestConfig.ts" 5 | import useInventory from "../hooks/useInventory.ts" 6 | import resolve, { _internals } from "./resolve.ts" 7 | import useCellar from "../hooks/useCellar.ts" 8 | import * as semver from "../utils/semver.ts" 9 | import { stub } from "@std/testing/mock" 10 | import SemVer from "../utils/semver.ts" 11 | import Path from "../utils/Path.ts" 12 | 13 | Deno.test("resolve cellar.has", { 14 | permissions: {'read': true, 'env': ["TMPDIR", "TMP", "TEMP"], 'write': [Deno.env.get("TMPDIR") || Deno.env.get("TMP") || Deno.env.get("TEMP") || "/tmp"] } 15 | }, async runner => { 16 | const prefix = useTestConfig().prefix 17 | const pkg = { project: "foo", version: new SemVer("1.0.0") } 18 | 19 | const cellar = useCellar() 20 | const has = async (pkg_: Package | PackageRequirement | Path) => { 21 | if (pkg_ instanceof Path) fail() 22 | if (pkg.project == pkg_.project) { 23 | if ('constraint' in pkg_ && !pkg_.constraint.satisfies(pkg.version)) return 24 | if ('version' in pkg_ && !pkg_.version.eq(pkg.version)) return 25 | const a: Installation = {pkg, path: prefix.join(pkg.project, `v${pkg.version}`) } 26 | return a 27 | } 28 | } 29 | 30 | await runner.step("happy path", async () => { 31 | const stub1 = stub(_internals, "useInventory", () => ({ 32 | get: () => fail(), 33 | select: () => Promise.resolve(pkg.version) 34 | })) 35 | const stub2 = stub(_internals, "useCellar", () => ({ 36 | ...cellar, has 37 | })) 38 | 39 | try { 40 | const rv = 
await resolve([pkg]) 41 | assertEquals(rv.pkgs[0].project, pkg.project) 42 | assertEquals(rv.installed[0].pkg.project, pkg.project) 43 | } finally { 44 | stub1.restore() 45 | stub2.restore() 46 | } 47 | }) 48 | 49 | await runner.step("throws if no version", async () => { 50 | const stub1 = stub(_internals, "useInventory", () => ({ 51 | get: () => fail(), 52 | select: () => Promise.resolve(undefined), 53 | })) 54 | const stub2 = stub(_internals, "useCellar", () => ({ 55 | ...cellar, 56 | has: () => Promise.resolve(undefined) 57 | })) 58 | 59 | let errord = false 60 | try { 61 | await resolve([{ ...pkg, version: new SemVer("1.0.1") }]) 62 | } catch { 63 | errord = true 64 | } finally { 65 | stub1.restore() 66 | stub2.restore() 67 | } 68 | assert(errord) 69 | }) 70 | 71 | await runner.step("uses existing version if it is the latest even if update set", async () => { 72 | const stub1 = stub(_internals, "useInventory", () => ({ 73 | get: () => fail(), 74 | select: () => Promise.resolve(pkg.version), 75 | })) 76 | const stub2 = stub(_internals, "useCellar", () => ({ 77 | ...cellar, has 78 | })) 79 | 80 | try { 81 | const rv = await resolve([pkg], { update: true }) 82 | assertEquals(rv.pkgs[0].project, pkg.project) 83 | assertEquals(rv.installed[0].pkg.project, pkg.project) 84 | } finally { 85 | stub1.restore() 86 | stub2.restore() 87 | } 88 | }) 89 | 90 | await runner.step("updates version if latest is not installed when update is set", async runner => { 91 | const stub1 = stub(_internals, "useInventory", () => ({ 92 | get: () => fail(), 93 | select: () => Promise.resolve(new SemVer("1.0.1")), 94 | })) 95 | const stub2 = stub(_internals, "useCellar", () => ({ 96 | ...cellar, has 97 | })) 98 | 99 | try { 100 | await runner.step("update: true", async () => { 101 | const rv = await resolve([{ project: pkg.project, constraint: new semver.Range("^1") }], { update: true }) 102 | assertEquals(rv.pkgs[0].project, pkg.project) 103 | assertEquals(rv.pending[0].project, 
pkg.project) 104 | assertEquals(rv.pending[0].version, new SemVer("1.0.1")) 105 | }) 106 | 107 | await runner.step("update: set", async () => { 108 | const update = new Set([pkg.project]) 109 | const rv = await resolve([{ project: pkg.project, constraint: new semver.Range("^1") }], { update }) 110 | assertEquals(rv.pkgs[0].project, pkg.project) 111 | assertEquals(rv.pending[0].project, pkg.project) 112 | assertEquals(rv.pending[0].version, new SemVer("1.0.1")) 113 | }) 114 | } finally { 115 | stub1.restore() 116 | stub2.restore() 117 | } 118 | }) 119 | }) 120 | 121 | const permissions = { net: false, read: true, env: ["TMPDIR", "HOME", "TMP", "TEMP"], write: true /*FIXME*/ } 122 | 123 | // https://github.com/pkgxdev/cli/issues/655 124 | Deno.test("postgres@500 fails", { permissions }, async () => { 125 | useTestConfig() 126 | 127 | const pkg = { 128 | project: "posqtgres.org", 129 | version: new SemVer("15.0.1") 130 | } 131 | 132 | const select = useInventory().select 133 | const stub1 = stub(_internals, "useInventory", () => ({ 134 | get: () => Promise.resolve([pkg.version]), 135 | select, 136 | })) 137 | 138 | const pkgs = [ 139 | { project: pkg.project, constraint: new semver.Range('@500') } 140 | ] 141 | 142 | try { 143 | // https://github.com/pkgxdev/cli/issues/655 144 | await assertRejects(() => resolve(pkgs)) 145 | } finally { 146 | stub1.restore() 147 | } 148 | }) 149 | 150 | // https://github.com/pkgxdev/cli/issues/655 151 | Deno.test("postgres@500 fails if installed", { permissions }, async () => { 152 | const pkg = { 153 | project: "posqtgres.org", 154 | version: new SemVer("15.0.1") 155 | } 156 | const prefix = useTestConfig().prefix 157 | 158 | const cellar = useCellar() 159 | const has = (b: Path | Package | PackageRequirement) => { 160 | if ("constraint" in b && b.constraint.satisfies(pkg.version)) { 161 | const a: Installation = {pkg, path: prefix.join(pkg.project, `v${pkg.version}`) } 162 | return Promise.resolve(a) 163 | } else { 164 | return 
Promise.resolve(undefined) 165 | } 166 | } 167 | 168 | const select = useInventory().select 169 | const stub1 = stub(_internals, "useInventory", () => ({ 170 | get: () => Promise.resolve([pkg.version]), 171 | select, 172 | })) 173 | const stub2 = stub(_internals, "useCellar", () => ({ 174 | ...cellar, 175 | has 176 | })) 177 | 178 | const pkgs = [ 179 | { project: pkg.project, constraint: new semver.Range('@500') } 180 | ] 181 | 182 | try { 183 | // https://github.com/pkgxdev/cli/issues/655 184 | await assertRejects(() => resolve(pkgs)) 185 | } finally { 186 | stub1.restore() 187 | stub2.restore() 188 | } 189 | }) 190 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![pkgx.dev](https://pkgx.dev/banner.png) 2 | 3 | [![coverage][]][coveralls] 4 | 5 | # libpkgx 6 | 7 | pkgx aims to provide packaging primitives. This library is a route to that 8 | goal. libpkgx can create sandboxed environments for 9 | packages that have no effect on the wider system without you or your user 10 | needing to install [pkgx]. 
11 | 12 | ## Getting Started 13 | 14 | ```sh 15 | $ npm install libpkgx 16 | # ^^ https://npmjs.com/libpkgx 17 | ``` 18 | 19 | Or with [Deno]: 20 | 21 | ```ts 22 | import * as pkgx from "https://deno.land/x/libpkgx/mod.ts" 23 | ``` 24 | 25 | ## Usage 26 | 27 | ```ts 28 | import { porcelain } from "libpkgx"; 29 | const { run } = porcelain; 30 | 31 | await run(`python -c 'print("Hello, World!")'`); 32 | // ^^ installs python and its deps (into ~/.pkgx/python.org/v3.x.y) 33 | // ^^ runs the command 34 | // ^^ output goes to the terminal 35 | // ^^ throws on execution error or non-zero exit code 36 | // ^^ executes via `/bin/sh` (so quoting and that work as expected) 37 | ``` 38 | 39 | Capture stdout easily: 40 | 41 | ```ts 42 | const { stdout } = await run(`ruby -e 'puts ", World!"'`, { stdout: true }); 43 | console.log("Hello,", stdout); 44 | ``` 45 | 46 | > `{ stderr: true }` also works. 47 | 48 | If there’s a non-zero exit code, we `throw`. However, when you need to, 49 | you can capture it instead: 50 | 51 | ```ts 52 | const { status } = await run(`perl -e 'exit(7)'`, { status: true }); 53 | assert(status == 7); // ^^ didn’t throw! 54 | ``` 55 | 56 | > The run function’s options also takes `env` if you need to supplement or 57 | > replace the inherited environment (which is passed by default). 58 | 59 | Need a specific version of something? [pkgx] can install any version 60 | of any package: 61 | 62 | ```ts 63 | await run(["node^16", "-e", "console.log(process.version)"]); 64 | // => v16.18.1 65 | ``` 66 | 67 | > Notice we passed args as `string[]`. This is also supported and is often 68 | > preferable since shell quoting rules can be tricky. If you pass `string[]` 69 | > we execute the command directly rather than via `/bin/sh`. 
70 | 
 71 | All of pkgx’s packages are relocatable so you can configure pkgx to install 
 72 | wherever you want: 
 73 | 
 74 | ```ts 
 75 | import { hooks, Path, porcelain } from "libpkgx"; 
 76 | const { install } = porcelain; 
 77 | const { useConfig } = hooks; 
 78 | 
 79 | useConfig({ prefix: Path.home().join(".local/share/my-app") }); 
 80 | // ^^ must be done **before** any other pkgx calls 
 81 | 
 82 | const go = await install("go.dev"); 
 83 | // ^^ go.path = /home/you/.local/share/my-app/go.dev/v1.20.4 
 84 | ``` 
 85 | 
 86 | ### Designed for Composability 
 87 | 
 88 | The library is split into [plumbing](src/plumbing) and [porcelain](src/porcelain) (copying git’s lead). 
 89 | The porcelain is what most people need, but if you need more control, dive 
 90 | into the porcelain sources to see how to use the plumbing primitives to get 
 91 | precisely what you need. 
 92 | 
 93 | For example if you want to run a command with node’s `spawn` instead it is 
 94 | simple enough to first use our porcelain `install` function then grab the 
 95 | `env` you’ll need to pass to `spawn` using our `useShellEnv` hook. 
 96 | 
 97 | Perhaps what you create should go into the porcelain? If so, please open a PR. 
 98 | 
 99 | ### Logging 
 100 | 
 101 | Most functions take an optional `logger` parameter so you can output logging 
 102 | information if you so choose. `pkgx` (cli) has a fairly sophisticated logger, so 
 103 | go check that out if you want. 
For our porcelain functions we provide a simple 104 | debug-friendly logger (`ConsoleLogger`) that will output everything via 105 | `console.error`: 106 | 107 | ```ts 108 | import { porcelain, plumbing, utils } from "libpkgx" 109 | const { ConsoleLogger } = utils 110 | const { run } = porcelain 111 | 112 | const logger = ConsoleLogger() 113 | await run("youtube-dl youtu.be/xiq5euezOEQ", { logger }).exec() 114 | ``` 115 | 116 | ### Caveats 117 | 118 | We have our own implementation of semver because open source has existed for 119 | decades and Semantic Versioning is much newer than that. Our implementation is 120 | quite compatible but not completely so. Use our semver with libpkgx. 121 | Our implementation is 100% compatible with strings output from node’s own 122 | semver. 123 | 124 | Setting `useConfig()` is not thread safe. Thus if you are using web workers 125 | you must ensure the initial call to `useConfig()` is called on the main thread 126 | before any other calls might happen. We call it explicitly in our code so you 127 | will need to call it yourself in such a case. This is not ideal and we’d 128 | appreciate your help in fixing it. 129 | 130 | The plumbing has no magic. Libraries need well defined behavior. 131 | You’ll need to read the docs to use them effectively. 132 | 133 | libpkgx almost certainly will not work in a browser. Potentially it's possible. 134 | The first step would be compiling our bottles to WASM. We could use your help 135 | with that… 136 | 137 | We use a hook-like pattern because it is great. This library is not itself 138 | designed for React. 139 | 140 | We support the same platforms as [pkgx]. 141 | 142 | ## What Packages are Available? 143 | 144 | We can install anything in the [pantry]. 145 | 146 | If something you need is not there, adding to the pantry has been designed to 147 | be an easy and enjoyable process. Your contribution is both welcome and 148 | desired! 
149 | 150 | To see what is available refer to the [pantry] docs or you can run: 151 | `pkgx pkg search foo`. 152 | 153 |   154 | 155 | # Interesting Uses 156 | 157 | * You can grab cURL’s CA certificates which we pkg and keep up to date 158 | (`curl.se/ca-certs`). These are commonly needed across ecosystems but not 159 | always easily accessible. 160 | * grab libraries that wrappers need like openssl or sqlite 161 | * run a real database (like postgres) easily 162 | * load local AI models and their engines 163 | * load libraries and then use ffi to load symbols 164 | 165 |   166 | 167 | # Contributing 168 | 169 | We would be thrilled to hear your ideas† or receive your pull requests. 170 | 171 | > † [discussions][discussion] 172 | 173 | ## Anatomy 174 | 175 | The code is written with Deno (just like [pkgx]) but is compiled to a 176 | node package for wider accessibility (and ∵ [pkgx] is node/electron). 177 | 178 | The library is architected into hooks, plumbing and porcelain. Where the hooks 179 | represent the low level primitives of pkging, the plumbing glues those 180 | primitives together into useful components and the porcelain is a user 181 | friendly *façade* pattern for the plumbing. 182 | 183 | ## Supporting Other Languages 184 | 185 | We would love to port this code to every language. We are deliberately keeping 186 | the scope *tight*. Probably we would prefer to have one repo per language. 187 | 188 | pkgx has sensible rules for how packages are defined and installed so writing 189 | a port should be simple. 190 | 191 | We would love to explore how possible writing this in rust and then compiling 192 | to WASM for all other languages would be. Can you help? 193 | 194 | Open a [discussion] to start. 
195 | 196 | [discussion]: https://github.com/orgs/pkgxdev/discussions 197 | [pkgx]: https://github.com/pkgxdev/pkgx 198 | [Deno]: https://deno.land 199 | [pantry]: https://github.com/pkgxdev/pantry 200 | [plumbing]: ./plumbing/ 201 | [porcelain]: ./porcelain/ 202 | [coverage]: https://coveralls.io/repos/github/pkgxdev/libpkgx/badge.svg 203 | [coveralls]: https://coveralls.io/github/pkgxdev/libpkgx 204 | -------------------------------------------------------------------------------- /src/utils/Path.test.ts: -------------------------------------------------------------------------------- 1 | import { assert, assertEquals, assertFalse, assertThrows, fail } from "@std/assert" 2 | import { SEPARATOR as SEP } from "jsr:@std/path@1" 3 | import Path from "./Path.ts" 4 | 5 | Deno.test("test Path", async test => { 6 | await test.step("creating files", () => { 7 | const start = Deno.build.os == 'windows' ? 'C:' : '' 8 | assertEquals(new Path("/a/b/c").components(), [start, "a", "b", "c"]) 9 | assertEquals(new Path("/a/b/c").split(), [new Path("/a/b"), "c"]) 10 | 11 | const tmp = Path.mktemp({prefix: "pkgx-"}) 12 | assert(tmp.isEmpty()) 13 | 14 | const child = tmp.join("a/b/c") 15 | assertFalse(child.parent().isDirectory()) 16 | child.parent().mkdir('p') 17 | assert(child.parent().isDirectory()) 18 | 19 | assertThrows(() => child.readlink()) // not found 20 | assertFalse(child.isReadableFile()) 21 | child.touch() 22 | assert(child.isReadableFile()) 23 | 24 | assert(child.string.startsWith(tmp.string)) 25 | assertFalse(tmp.isEmpty()) 26 | if (Deno.build.os != 'windows') { 27 | assertEquals(child.readlink(), child) // not a link 28 | } 29 | 30 | const rs = Deno.build.os === "windows" ? 
"C:\\" : '/' 31 | assertEquals(new Path("/").string, rs) 32 | }) 33 | 34 | await test.step("write and read", async () => { 35 | const tmp = Path.mktemp({prefix: "pkgx-"}) 36 | 37 | const data = tmp.join("test.dat") 38 | data.write({text: "hello\nworld"}) 39 | 40 | const lines = await asyncIterToArray(data.readLines()) 41 | assertEquals(lines, ["hello", "world"]) 42 | 43 | // will throw with no force flag 44 | assertThrows(() => data.write({ json: { hello: "world" } })) 45 | 46 | data.write({ json: { hello: "world" }, force: true }) 47 | assertEquals(await data.readJSON(), { hello: "world" }) 48 | }) 49 | 50 | await test.step("test walk", async () => { 51 | const tmp = Path.mktemp({prefix: "pkgx-"}) 52 | 53 | const a = tmp.join("a").mkdir() 54 | a.join("a1").touch() 55 | a.join("a2").touch() 56 | 57 | const b = tmp.join("b").mkdir() 58 | b.join("b1").touch() 59 | b.join("b2").touch() 60 | 61 | const c = tmp.join("c").mkdir() 62 | c.join("c1").touch() 63 | c.join("c2").touch() 64 | 65 | assert(c.join("c2").isFile()) 66 | assert(c.isDirectory()) 67 | 68 | const walked = (await asyncIterToArray(tmp.walk())) 69 | .map(([path, entry]) => { 70 | return {name: path.basename(), isDir: entry.isDirectory} 71 | }) 72 | .sort((a, b) => a.name.localeCompare(b.name)) 73 | 74 | assertEquals(walked, [ 75 | { name: "a", isDir: true}, 76 | { name: "a1", isDir: false}, 77 | { name: "a2", isDir: false}, 78 | { name: "b", isDir: true}, 79 | { name: "b1", isDir: false}, 80 | { name: "b2", isDir: false}, 81 | { name: "c", isDir: true}, 82 | { name: "c1", isDir: false}, 83 | { name: "c2", isDir: false}, 84 | ]) 85 | }) 86 | 87 | await test.step({ 88 | name: "test symlink created", 89 | ignore: Deno.build.os == "windows", 90 | fn() { 91 | const tmp = Path.mktemp({prefix: "pkgx-"}).join("foo").mkdir() 92 | const a = tmp.join("a").touch() 93 | const b = tmp.join("b") 94 | b.ln('s', { target: a }) 95 | assertEquals(b.readlink(), a) 96 | assert(b.isSymlink()) 97 | } 98 | }) 99 | }) 100 | 101 | 
Deno.test("Path.cwd", () => { 102 | const cwd = Path.cwd() 103 | assertEquals(cwd.string, Deno.cwd()) 104 | }) 105 | 106 | Deno.test("normalization", () => { 107 | const start = Deno.build.os == 'windows' ? 'C:\\' : SEP 108 | assertEquals(new Path("/a/b/").string, `${start}a${SEP}b`) 109 | assertEquals(new Path("/a/b////").string, `${start}a${SEP}b`) 110 | assertEquals(new Path("/a/b").string, `${start}a${SEP}b`) 111 | assertEquals(new Path("/a////b").string, `${start}a${SEP}b`) 112 | }) 113 | 114 | Deno.test("new Path(Path)", () => { 115 | const p1 = new Path("/home/user/file.txt") 116 | const p2 = new Path(p1) 117 | assertEquals(p1, p2) 118 | }) 119 | 120 | Deno.test("Path.join()", () => { 121 | const path = new Path("/foo") 122 | assert(path.eq(path.join())) 123 | }) 124 | 125 | Deno.test({ 126 | name: "Path.isExecutableFile()", 127 | ignore: Deno.build.os == "windows", 128 | fn() { 129 | const tmp = Path.mktemp({prefix: "pkgx-"}).mkdir() 130 | const executable = tmp.join("executable").touch() 131 | executable.chmod(0o755) 132 | const notExecutable = tmp.join("not-executable").touch() 133 | 134 | assert(executable.isExecutableFile()) 135 | assertFalse(notExecutable.isExecutableFile()) 136 | } 137 | }) 138 | 139 | Deno.test("Path.extname()", () => { 140 | const path = new Path("/home/user/file.txt") 141 | assertEquals(path.extname(), ".txt") 142 | }) 143 | 144 | Deno.test("Path.mv()", () => { 145 | const tmp = Path.mktemp({prefix: "pkgx-"}) 146 | const a = tmp.join("a").touch() 147 | const b = tmp.join("b") 148 | 149 | a.mv({ to: b }) 150 | assertFalse(a.exists()) 151 | assert(b.exists()) 152 | 153 | const c = tmp.join("c").mkdir() 154 | b.mv({ into: c }) 155 | 156 | assertFalse(b.exists()) 157 | assert(c.join("b").exists()) 158 | 159 | assertThrows(() => c.mv({ to: c })) 160 | 161 | // for coverage 162 | assert(b.neq(c)) 163 | }) 164 | 165 | Deno.test("Path.cp()", () => { 166 | const tmp = Path.mktemp({prefix: "pkgx-"}).mkdir() 167 | const a = 
tmp.join("a").touch() 168 | const b = tmp.join("b").mkdir() 169 | 170 | a.cp({ into: b }) 171 | assert(b.join("a").isReadableFile()) 172 | assert(a.isReadableFile()) 173 | }) 174 | 175 | Deno.test("Path.relative()", () => { 176 | const a = new Path("/home/user/file.txt") 177 | const b = new Path("/home/user/dir") 178 | assertEquals(a.relative({ to: b }), `..${SEP}file.txt`) 179 | assertEquals(b.relative({ to: a }), `..${SEP}dir`) 180 | }) 181 | 182 | Deno.test({ 183 | name: "Path.realpath()", 184 | ignore: Deno.build.os == "windows", 185 | fn() { 186 | const tmp = Path.mktemp({prefix: "pkgx-"}).mkdir() 187 | const a = tmp.join("a").touch() 188 | const b = tmp.join("b").ln('s', { target: a }) 189 | 190 | assertEquals(b.realpath(), a.realpath()) 191 | } 192 | }) 193 | 194 | Deno.test("Path.prettyLocalString()", () => { 195 | const path = Path.home().join(".config/pkgx/config.toml") 196 | assertEquals(path.prettyLocalString(), `~${SEP}.config${SEP}pkgx${SEP}config.toml`) 197 | 198 | const root = Deno.build.os == 'windows' ? 
'C:\\' : '/' 199 | assertEquals(new Path("/a/b").prettyLocalString(), `${root}a${SEP}b`) 200 | }) 201 | 202 | Deno.test("Path.readYAMLAll()", async () => { 203 | const path = Path.cwd().join("./fixtures/pathtests/readYAMLAll.yaml"); 204 | 205 | try { 206 | const yamlData = await path.readYAMLAll(); // ✅ Use await 207 | 208 | assertEquals(Array.isArray(yamlData), true, "Expected yamlData to be an array"); 209 | 210 | if (!Array.isArray(yamlData)) { 211 | fail("Expected an array"); 212 | return; 213 | } 214 | 215 | assertEquals(yamlData.length, 2, "Expected exactly 2 YAML documents"); 216 | assertEquals(yamlData, [{ abc: "xyz" }, { ijk: "lmn" }], "YAML content mismatch"); 217 | 218 | } catch (err) { 219 | console.error("Error reading YAML:", err); 220 | fail("Error reading YAML"); 221 | } 222 | }); 223 | 224 | Deno.test("Path.readYAMLAllErr()", async () => { 225 | const path = Path.cwd().join("./fixtures/pathtests/invalid.yaml"); 226 | try { 227 | await path.readYAMLAll(); 228 | fail("invalid file should not reach here") 229 | } catch (err) { 230 | if (err instanceof Error) { 231 | assertEquals(err.name, "NotFound") 232 | } else{ 233 | throw err; 234 | } 235 | } 236 | }); 237 | 238 | Deno.test("Path.chuzzle()", () => { 239 | const path = Path.mktemp().join("file.txt").touch() 240 | assertEquals(path.chuzzle(), path) 241 | 242 | const missingPath = path.parent().join("ghost.void") 243 | assertEquals(missingPath.chuzzle(), undefined) 244 | }) 245 | 246 | Deno.test("Path.ls()", async () => { 247 | const tmp = Path.mktemp({prefix: "pkgx-"}).mkdir() 248 | tmp.join("a").touch() 249 | tmp.join("b").touch() 250 | tmp.join("c").mkdir() 251 | 252 | const entries = (await asyncIterToArray(tmp.ls())).map(([,{name}]) => name) 253 | assertEquals(entries.sort(), ["a", "b", "c"]) 254 | }) 255 | 256 | async function asyncIterToArray (iter: AsyncIterable){ 257 | const result = []; 258 | for await(const i of iter) { 259 | result.push(i); 260 | } 261 | return result; 262 | } 263 | 264 | 
Deno.test("ctor throws", () => { 265 | assertThrows(() => new Path("")) 266 | assertThrows(() => new Path(" ")) 267 | assertThrows(() => new Path(" \n ")) 268 | assertThrows(() => new Path(" / ")) 269 | }) 270 | 271 | Deno.test({ 272 | name: "dirname", 273 | ignore: Deno.build.os != "windows", 274 | fn() { 275 | const p = new Path("Y:\\") 276 | assertEquals(p.string, "Y:\\") 277 | assertEquals(p.parent().string, "Y:\\") 278 | assertEquals(p.parent().parent().parent().string, "Y:\\") 279 | 280 | const q = new Path("\\\\bar\\foo\\baz") 281 | 282 | assertEquals(q.string, "\\\\bar\\foo\\baz") 283 | assertEquals(q.parent().string, "\\\\bar\\foo") 284 | assertEquals(q.parent().parent().parent().string, "\\\\bar\\foo") // the first path after the hostname is actually a root 285 | } 286 | }) 287 | 288 | Deno.test("join roots", () => { 289 | if (Deno.build.os == "windows") { 290 | assertEquals(new Path("C:\\foo").join("D:\\bar").string, "D:\\bar") 291 | assertEquals(new Path("C:").join("D:\\bar\baz").string, "D:\\bar\baz") 292 | 293 | assertEquals(new Path("c:\\foo\bar").join("\\\\bar\\baz").string, "\\\\bar\\baz") 294 | 295 | } else { 296 | assertEquals(new Path("/foo").join("/bar").string, "/bar") 297 | } 298 | }) 299 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2023 pkgx inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/utils/semver.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * we have our own implementation because open source is full of weird 3 | * but *almost* valid semver schemes, eg: 4 | * openssl 1.1.1q 5 | * ghc 5.64.3.2 6 | * it also allows us to implement semver_intersection without hating our lives 7 | */ 8 | export default class SemVer { 9 | readonly components: number[] 10 | 11 | major: number 12 | minor: number 13 | patch: number 14 | 15 | //FIXME parse these 16 | readonly prerelease: string[] = [] 17 | readonly build: string[] = [] 18 | 19 | readonly raw: string 20 | readonly pretty?: string 21 | 22 | constructor(input: string | number[] | Range | SemVer) { 23 | if (typeof input == 'string') { 24 | const vprefix = input.startsWith('v') 25 | const raw = vprefix ? 
input.slice(1) : input 26 | const parts = raw.split('.') 27 | let pretty_is_raw = false 28 | this.components = parts.flatMap((x, index) => { 29 | const match = x.match(/^(\d+)([a-z])$/) 30 | if (match) { 31 | if (index != parts.length - 1) throw new Error(`invalid version: ${input}`) 32 | const n = parseInt(match[1]) 33 | if (isNaN(n)) throw new Error(`invalid version: ${input}`) 34 | pretty_is_raw = true 35 | return [n, char_to_num(match[2])] 36 | } else if (/^\d+$/.test(x)) { 37 | const n = parseInt(x) // parseInt will parse eg. `5-start` to `5` 38 | if (isNaN(n)) throw new Error(`invalid version: ${input}`) 39 | return [n] 40 | } else { 41 | throw new Error(`invalid version: ${input}`) 42 | } 43 | }) 44 | this.raw = raw 45 | if (pretty_is_raw) this.pretty = raw 46 | } else if (input instanceof Range || input instanceof SemVer) { 47 | const v = input instanceof Range ? input.single() : input 48 | if (!v) throw new Error(`range represents more than a single version: ${input}`) 49 | this.components = v.components 50 | this.raw = v.raw 51 | this.pretty = v.pretty 52 | } else { 53 | this.components = [...input] 54 | this.raw = input.join('.') 55 | } 56 | 57 | this.major = this.components[0] 58 | this.minor = this.components[1] ?? 0 59 | this.patch = this.components[2] ?? 0 60 | 61 | function char_to_num(c: string) { 62 | return c.charCodeAt(0) - 'a'.charCodeAt(0) + 1 63 | } 64 | } 65 | 66 | toString(): string { 67 | return this.pretty ?? 68 | (this.components.length <= 3 69 | ? 
`${this.major}.${this.minor}.${this.patch}` 70 | : this.components.join('.')) 71 | } 72 | 73 | eq(that: SemVer): boolean { 74 | return this.compare(that) == 0 75 | } 76 | 77 | neq(that: SemVer): boolean { 78 | return this.compare(that) != 0 79 | } 80 | 81 | gt(that: SemVer): boolean { 82 | return this.compare(that) > 0 83 | } 84 | 85 | gte(that: SemVer): boolean { 86 | return this.compare(that) >= 0 87 | } 88 | 89 | lt(that: SemVer): boolean { 90 | return this.compare(that) < 0 91 | } 92 | 93 | lte(that: SemVer): boolean { 94 | return this.compare(that) <= 0 95 | } 96 | 97 | compare(that: SemVer): number { 98 | return _compare(this, that) 99 | } 100 | 101 | [Symbol.for("Deno.customInspect")]() { 102 | return this.toString() 103 | } 104 | } 105 | 106 | /// the same as the constructor but swallows the error returning undefined instead 107 | /// also slightly more tolerant parsing 108 | export function parse(input: string) { 109 | try { 110 | return new SemVer(input) 111 | } catch { 112 | return undefined 113 | } 114 | } 115 | 116 | /// determines if the input is in fact a valid semantic version 117 | export function isValid(input: string) { 118 | return parse(input) !== undefined 119 | } 120 | 121 | /// we don’t support as much as node-semver but we refuse to do so because it is badness 122 | export class Range { 123 | // contract [0, 1] where 0 != 1 and 0 < 1 124 | readonly set: ([SemVer, SemVer] | SemVer)[] | '*' 125 | 126 | constructor(input: string | ([SemVer, SemVer] | SemVer)[]) { 127 | if (input === "*") { 128 | this.set = '*' 129 | } else if (typeof input !== 'string') { 130 | this.set = input 131 | } else { 132 | input = input.trim() 133 | 134 | const err = () => new Error(`invalid semver range: ${input}`) 135 | 136 | this.set = input.split(/(?:,|\s*\|\|\s*)/).map(input => { 137 | let match = input.match(/^>=((\d+\.)*\d+)\s*(<((\d+\.)*\d+))?$/) 138 | if (match) { 139 | const v1 = new SemVer(match[1]) 140 | const v2 = match[3] ? new SemVer(match[4])! 
: new SemVer([Infinity, Infinity, Infinity]) 141 | return [v1, v2] 142 | } else if ((match = input.match(/^([~=<^@])(.+)$/))) { 143 | let v1: SemVer | undefined, v2: SemVer | undefined 144 | switch (match[1]) { 145 | // deno-lint-ignore no-case-declarations 146 | case "^": 147 | v1 = new SemVer(match[2]) 148 | const parts = [] 149 | for (let i = 0; i < v1.components.length; i++) { 150 | if (v1.components[i] === 0 && i < v1.components.length - 1) { 151 | parts.push(0) 152 | } else { 153 | parts.push(v1.components[i] + 1) 154 | break 155 | } 156 | } 157 | v2 = new SemVer(parts) 158 | return [v1, v2] 159 | case "~": { 160 | v1 = new SemVer(match[2]) 161 | if (v1.components.length == 1) { 162 | // yep this is the official policy 163 | v2 = new SemVer([v1.major + 1]) 164 | } else { 165 | v2 = new SemVer([v1.major, v1.minor + 1]) 166 | } 167 | } return [v1, v2] 168 | case "<": 169 | v1 = new SemVer([0]) 170 | v2 = new SemVer(match[2]) 171 | return [v1, v2] 172 | case "=": 173 | return new SemVer(match[2]) 174 | case "@": { 175 | // @ is not a valid semver operator, but people expect it to work like so: 176 | // @5 => latest 5.x (ie ^5) 177 | // @5.1 => latest 5.1.x (ie. ~5.1) 178 | // @5.1.0 => latest 5.1.0 (usually 5.1.0 since most stuff hasn't got more digits) 179 | const parts = match[2].split(".").map(x => parseInt(x)) 180 | v1 = new SemVer(parts) 181 | const last = parts.pop()! 
182 | v2 = new SemVer([...parts, last + 1]) 183 | return [v1, v2] 184 | }} 185 | } 186 | throw err() 187 | }) 188 | 189 | /// I think this is an impossible state but let’s be sure 190 | if (this.set.length == 0) throw err() 191 | 192 | for (const i of this.set) { 193 | if (Array.isArray(i) && !i[0].lt(i[1])) throw err() 194 | } 195 | } 196 | } 197 | 198 | toString(): string { 199 | if (this.set === '*') { 200 | return '*' 201 | } else { 202 | return this.set.map(v => { 203 | if (!Array.isArray(v)) return `=${v.toString()}` 204 | const [v1, v2] = v 205 | if (v2.major == v1.major + 1 && v2.minor == 0 && v2.patch == 0) { 206 | const v = chomp(v1) 207 | if (v1.major == 0) { 208 | if (v1.components.length == 1) { 209 | return `^0` 210 | } else { 211 | return `>=${v}<1` 212 | } 213 | } else { 214 | return `^${v}` 215 | } 216 | } else if (v2.major == v1.major && v2.minor == v1.minor + 1 && v2.patch == 0) { 217 | const v = chomp(v1) 218 | return `~${v}` 219 | } else if (v2.major == Infinity) { 220 | const v = chomp(v1) 221 | return `>=${v}` 222 | } else if (at(v1, v2)) { 223 | return `@${v1}` 224 | } else { 225 | return `>=${chomp(v1)}<${chomp(v2)}` 226 | } 227 | }).join(",") 228 | } 229 | 230 | function at(v1: SemVer, {components: cc2}: SemVer) { 231 | const cc1 = [...v1.components] 232 | 233 | if (cc1.length > cc2.length) { 234 | return false 235 | } 236 | 237 | // it's possible the components were short due to 0-truncation 238 | // add them back so our algo works 239 | while (cc1.length < cc2.length) { 240 | cc1.push(0) 241 | } 242 | 243 | if (last(cc1) != last(cc2) - 1) { 244 | return false 245 | } 246 | 247 | for (let i = 0; i < (cc1.length - 1); i++) { 248 | if (cc1[i] != cc2[i]) return false 249 | } 250 | 251 | return true 252 | } 253 | 254 | function last(arr: number[]) { 255 | return arr[arr.length - 1] 256 | } 257 | } 258 | 259 | // eq(that: Range): boolean { 260 | // if (this.set.length !== that.set.length) return false 261 | // for (let i = 0; i < 
this.set.length; i++) { 262 | // const [a,b] = [this.set[i], that.set[i]] 263 | // if (typeof a !== 'string' && typeof b !== 'string') { 264 | // if (a[0].neq(b[0])) return false 265 | // if (a[1].neq(b[1])) return false 266 | // } else if (a != b) { 267 | // return false 268 | // } 269 | // } 270 | // return true 271 | // } 272 | 273 | /// tolerant to stuff in the wild that hasn’t semver specifiers 274 | static parse(input: string | number): Range | undefined { 275 | if (!input) return 276 | input = input.toString() 277 | try { 278 | return new Range(input) 279 | } catch { 280 | if (!/^(\d+\.)*\d+$/.test(input)) return 281 | 282 | // AFAICT this is what people expect 283 | // verified via https://jubianchi.github.io/semver-check/ 284 | 285 | return new Range(`@${input}`) 286 | } 287 | } 288 | 289 | satisfies(version: SemVer): boolean { 290 | if (this.set === '*') { 291 | return true 292 | } else { 293 | return this.set.some(v => { 294 | if (Array.isArray(v)) { 295 | const [v1, v2] = v 296 | return version.compare(v1) >= 0 && version.compare(v2) < 0 297 | } else { 298 | return version.eq(v) 299 | } 300 | }) 301 | } 302 | } 303 | 304 | max(versions: SemVer[]): SemVer | undefined { 305 | return versions.filter(x => this.satisfies(x)).sort((a,b) => a.compare(b)).pop() 306 | } 307 | 308 | single(): SemVer | undefined { 309 | if (this.set === '*') return 310 | if (this.set.length > 1) return 311 | return Array.isArray(this.set[0]) ? 
undefined : this.set[0] 312 | } 313 | 314 | [Symbol.for("Deno.customInspect")]() { 315 | return this.toString() 316 | } 317 | } 318 | 319 | function zip(a: T[], b: U[]) { 320 | const N = Math.max(a.length, b.length) 321 | const rv: [T | undefined, U | undefined][] = [] 322 | for (let i = 0; i < N; ++i) { 323 | rv.push([a[i], b[i]]) 324 | } 325 | return rv 326 | } 327 | 328 | 329 | function _compare(a: SemVer, b: SemVer): number { 330 | for (let [c,d] of zip(cmpcomponents(a), cmpcomponents(b))) { 331 | c ??= 0 332 | d ??= 0 333 | if (c !== d) return c - d 334 | } 335 | return 0 336 | 337 | /// we sort calver before semver, mainly because we label pre-releases with calver 338 | /// we worry that one day we will severely regret this but… it’s what we do for now 339 | function cmpcomponents(v: SemVer) { 340 | if (v.major > 1996 && v.major != Infinity) { 341 | return [0,0,0, ...v.components] 342 | } else { 343 | return v.components 344 | } 345 | } 346 | } 347 | export { _compare as compare } 348 | 349 | 350 | export function intersect(a: Range, b: Range): Range { 351 | if (b.set === '*') return a 352 | if (a.set === '*') return b 353 | 354 | // calculate the intersection between two semver.Ranges 355 | const set: ([SemVer, SemVer] | SemVer)[] = [] 356 | 357 | for (const aa of a.set) { 358 | for (const bb of b.set) { 359 | if (!Array.isArray(aa) && !Array.isArray(bb)) { 360 | if (aa.eq(bb)) set.push(aa) 361 | } else if (!Array.isArray(aa)) { 362 | const bbb = bb as [SemVer, SemVer] 363 | if (aa.compare(bbb[0]) >= 0 && aa.lt(bbb[1])) set.push(aa) 364 | } else if (!Array.isArray(bb)) { 365 | const aaa = aa as [SemVer, SemVer] 366 | if (bb.compare(aaa[0]) >= 0 && bb.lt(aaa[1])) set.push(bb) 367 | } else { 368 | const a1 = aa[0] 369 | const a2 = aa[1] 370 | const b1 = bb[0] 371 | const b2 = bb[1] 372 | 373 | if (a1.compare(b2) >= 0 || b1.compare(a2) >= 0) { 374 | continue 375 | } 376 | 377 | set.push([a1.compare(b1) > 0 ? a1 : b1, a2.compare(b2) < 0 ? 
a2 : b2]) 378 | } 379 | } 380 | } 381 | 382 | if (set.length <= 0) throw new Error(`cannot intersect: ${a} && ${b}`) 383 | 384 | return new Range(set) 385 | } 386 | 387 | 388 | //FIXME yes yes this is not sufficient 389 | export const regex = /\d+\.\d+\.\d+/ 390 | 391 | function chomp(v: SemVer) { 392 | return v.toString().replace(/(\.0)+$/g, '') || '0' 393 | } 394 | -------------------------------------------------------------------------------- /src/utils/semver.test.ts: -------------------------------------------------------------------------------- 1 | // deno-lint-ignore-file no-explicit-any 2 | import { assert, assertEquals, assertFalse, assertThrows } from "@std/assert" 3 | import SemVer, * as semver from "./semver.ts" 4 | 5 | 6 | Deno.test("semver", async test => { 7 | await test.step("sort", () => { 8 | const input = [new SemVer([1,2,3]), new SemVer("10.3.4"), new SemVer("1.2.4"), semver.parse("1.2.3.1")!, new SemVer("2.3.4")] 9 | const sorted1 = [...input].sort(semver.compare) 10 | const sorted2 = [...input].sort() 11 | 12 | assertEquals(sorted1.join(", "), "1.2.3, 1.2.3.1, 1.2.4, 2.3.4, 10.3.4") 13 | 14 | /// we are showing we understand how vanilla JS works here 15 | assertEquals(sorted2.join(", "), "1.2.3, 1.2.3.1, 1.2.4, 10.3.4, 2.3.4") 16 | }) 17 | 18 | await test.step("calver sort", () => { 19 | const input = [new SemVer([1,2,3]), new SemVer("2.3.4"), new SemVer("2023.03.04"), semver.parse("1.2.3.1")!, new SemVer([3,4,5])] 20 | const sorted1 = [...input].sort(semver.compare) 21 | const sorted2 = [...input].sort() 22 | 23 | assertEquals(sorted1.join(","), "2023.3.4,1.2.3,1.2.3.1,2.3.4,3.4.5") 24 | 25 | /// we are showing we understand how vanilla JS works here 26 | assertEquals(sorted2.join(","), "1.2.3,1.2.3.1,2.3.4,2023.3.4,3.4.5") 27 | }) 28 | 29 | await test.step("parse", () => { 30 | assertEquals(semver.parse("1.2.3.4.5")?.toString(), "1.2.3.4.5") 31 | assertEquals(semver.parse("1.2.3.4")?.toString(), "1.2.3.4") 32 | 
assertEquals(semver.parse("1.2.3")?.toString(), "1.2.3") 33 | assertEquals(semver.parse("1.2")?.toString(), "1.2.0") 34 | assertEquals(semver.parse("1")?.toString(), "1.0.0") 35 | }) 36 | 37 | await test.step("isValidSemVer", () => { 38 | assert(semver.isValid("1.2.3.4.5")) 39 | assert(semver.isValid("1a")) 40 | assert(semver.isValid("20.4")) 41 | assert(semver.isValid("2023.05.10")) 42 | assertFalse(semver.isValid("a")) 43 | assertFalse(semver.isValid("!#!@#!@#")) 44 | }) 45 | 46 | await test.step("satisfies", () => { 47 | assertEquals(new semver.Range("=3.1.0").max([new SemVer("3.1.0")]), new SemVer("3.1.0")) 48 | 49 | // the following two test for https://github.com/pkgxdev/lib/pull/36 50 | assertEquals(new semver.Range("^300").max([new SemVer("3.1.0")]), undefined) 51 | assertEquals(new semver.Range("@300").max([new SemVer("3.1.0")]), undefined) 52 | }) 53 | 54 | await test.step("constructor", () => { 55 | assertEquals(new SemVer("1.2.3.4.5.6").toString(), "1.2.3.4.5.6") 56 | assertEquals(new SemVer("1.2.3.4.5").toString(), "1.2.3.4.5") 57 | assertEquals(new SemVer("1.2.3.4").toString(), "1.2.3.4") 58 | assertEquals(new SemVer("1.2.3").toString(), "1.2.3") 59 | assertEquals(new SemVer("v1.2.3").toString(), "1.2.3") 60 | assertEquals(new SemVer("1.2").toString(), "1.2.0") 61 | assertEquals(new SemVer("v1.2").toString(), "1.2.0") 62 | assertEquals(new SemVer("1").toString(), "1.0.0") 63 | assertEquals(new SemVer("v1").toString(), "1.0.0") 64 | 65 | assertEquals(new SemVer("9e").toString(), "9e") 66 | assertEquals(new SemVer("9e").components, [9,5]) 67 | assertEquals(new SemVer("3.3a").toString(), "3.3a") 68 | assertEquals(new SemVer("3.3a").components, [3,3,1]) 69 | assertEquals(new SemVer("1.1.1q").toString(), "1.1.1q") 70 | assertEquals(new SemVer("1.1.1q").components, [1,1,1,17]) 71 | }) 72 | 73 | await test.step("ranges", () => { 74 | const a = new semver.Range(">=1.2.3<2.3.4 || >=3") 75 | assertEquals(a.toString(), ">=1.2.3<2.3.4,>=3") 76 | 77 | 
assert(a.satisfies(new SemVer("1.2.3"))) 78 | assert(a.satisfies(new SemVer("1.4.1"))) 79 | assert(a.satisfies(new SemVer("3.0.0"))) 80 | assert(a.satisfies(new SemVer("90.0.0"))) 81 | assertFalse(a.satisfies(new SemVer("2.3.4"))) 82 | assertFalse(a.satisfies(new SemVer("2.5.0"))) 83 | 84 | const b = new semver.Range("^0.15") 85 | // Due to the nature of the `^` operator, this 86 | // is the same as `~0.15`, and our code represents 87 | // it as such. 88 | assertEquals(b.toString(), "~0.15") 89 | 90 | const c = new semver.Range("~0.15") 91 | assertEquals(c.toString(), "~0.15") 92 | 93 | assert(c.satisfies(new SemVer("0.15.0"))) 94 | assert(c.satisfies(new SemVer("0.15.1"))) 95 | assertFalse(c.satisfies(new SemVer("0.14.0"))) 96 | assertFalse(c.satisfies(new SemVer("0.16.0"))) 97 | 98 | const d = new semver.Range("~0.15.1") 99 | assertEquals(d.toString(), "~0.15.1") 100 | assert(d.satisfies(new SemVer("0.15.1"))) 101 | assert(d.satisfies(new SemVer("0.15.2"))) 102 | assertFalse(d.satisfies(new SemVer("0.15.0"))) 103 | assertFalse(d.satisfies(new SemVer("0.16.0"))) 104 | assertFalse(d.satisfies(new SemVer("0.14.0"))) 105 | 106 | // `~` is weird 107 | const e = new semver.Range("~1") 108 | assertEquals(e.toString(), "^1") 109 | assert(e.satisfies(new SemVer("v1.0"))) 110 | assert(e.satisfies(new SemVer("v1.1"))) 111 | assertFalse(e.satisfies(new SemVer("v2"))) 112 | 113 | const f = new semver.Range("^14||^16||^18") 114 | assert(f.satisfies(new SemVer("14.0.0"))) 115 | assertFalse(f.satisfies(new SemVer("15.0.0"))) 116 | assert(f.satisfies(new SemVer("16.0.0"))) 117 | assertFalse(f.satisfies(new SemVer("17.0.0"))) 118 | assert(f.satisfies(new SemVer("18.0.0"))) 119 | 120 | const g = new semver.Range("<15") 121 | assert(g.satisfies(new SemVer("14.0.0"))) 122 | assert(g.satisfies(new SemVer("0.0.1"))) 123 | assertFalse(g.satisfies(new SemVer("15.0.0"))) 124 | 125 | const i = new semver.Range("^1.2.3.4") 126 | assert(i.satisfies(new SemVer("1.2.3.4"))) 127 | 
assert(i.satisfies(new SemVer("1.2.3.5"))) 128 | assert(i.satisfies(new SemVer("1.2.4.2"))) 129 | assert(i.satisfies(new SemVer("1.3.4.2"))) 130 | assertFalse(i.satisfies(new SemVer("2.0.0"))) 131 | 132 | const j = new semver.Range("^0.1.2.3") 133 | assert(j.satisfies(new SemVer("0.1.2.3"))) 134 | assert(j.satisfies(new SemVer("0.1.3"))) 135 | assertFalse(j.satisfies(new SemVer("0.2.0"))) 136 | 137 | const k = new semver.Range("^0.0.1.2") 138 | assertFalse(k.satisfies(new SemVer("0.0.1.1"))) 139 | assert(k.satisfies(new SemVer("0.0.1.2"))) 140 | assert(k.satisfies(new SemVer("0.0.1.9"))) 141 | assertFalse(k.satisfies(new SemVer("0.0.2.0"))) 142 | 143 | const l = new semver.Range("^0.0.0.1") 144 | assertFalse(l.satisfies(new SemVer("0.0.0.0"))) 145 | assert(l.satisfies(new SemVer("0.0.0.1"))) 146 | assertFalse(l.satisfies(new SemVer("0.0.0.2"))) 147 | 148 | // This one is weird, but it should mean "<1" 149 | const m = new semver.Range("^0") 150 | assert(m.satisfies(new SemVer("0.0.0"))) 151 | assert(m.satisfies(new SemVer("0.0.1"))) 152 | assert(m.satisfies(new SemVer("0.1.0"))) 153 | assert(m.satisfies(new SemVer("0.9.1"))) 154 | assertFalse(m.satisfies(new SemVer("1.0.0"))) 155 | 156 | assertThrows(() => new semver.Range("1")) 157 | assertThrows(() => new semver.Range("1.2")) 158 | assertThrows(() => new semver.Range("1.2.3")) 159 | assertThrows(() => new semver.Range("1.2.3.4")) 160 | 161 | assertEquals(new semver.Range("@300").toString(), "^300") 162 | assertEquals(new semver.Range("@300.1").toString(), "~300.1") 163 | assertEquals(new semver.Range("@300.1.0").toString(), "@300.1.0") 164 | assertEquals(new semver.Range(">=300.1.0<300.1.1").toString(), "@300.1.0") 165 | }) 166 | 167 | await test.step("intersection", async test => { 168 | await test.step("^3.7…=3.11", () => { 169 | const a = new semver.Range("^3.7") 170 | const b = new semver.Range("=3.11") 171 | 172 | assertEquals(b.toString(), "=3.11.0") 173 | 174 | const c = semver.intersect(a, b) 175 | 
assertEquals(c.toString(), "=3.11.0") 176 | }) 177 | 178 | await test.step("^3.7…^3.9", () => { 179 | const a = new semver.Range("^3.7") 180 | const b = new semver.Range("^3.9") 181 | 182 | assertEquals(b.toString(), "^3.9") 183 | 184 | const c = semver.intersect(a, b) 185 | assertEquals(c.toString(), "^3.9") 186 | }) 187 | 188 | await test.step("^3.7…*", () => { 189 | const a = new semver.Range("^3.7") 190 | const b = new semver.Range("*") 191 | 192 | assertEquals(b.toString(), "*") 193 | 194 | const c = semver.intersect(a, b) 195 | assertEquals(c.toString(), "^3.7") 196 | }) 197 | 198 | await test.step("~3.7…~3.8", () => { 199 | const a = new semver.Range("~3.7") 200 | const b = new semver.Range("~3.8") 201 | 202 | assertThrows(() => semver.intersect(a, b)) 203 | }) 204 | 205 | await test.step("^3.7…=3.8", () => { 206 | const a = new semver.Range("^3.7") 207 | const b = new semver.Range("=3.8") 208 | const c = semver.intersect(a, b) 209 | assertEquals(c.toString(), "=3.8.0") 210 | }) 211 | 212 | await test.step("^11,^12…^11.3", () => { 213 | const a = new semver.Range("^11,^12") 214 | const b = new semver.Range("^11.3") 215 | const c = semver.intersect(a, b) 216 | assertEquals(c.toString(), "^11.3") 217 | }) 218 | 219 | await test.step(">=11<12", () => { 220 | const a = new semver.Range(">=11<12") 221 | const b = new semver.Range(">=11.0.0 <13.0.0.0") 222 | //assertEquals(a.toString(), "^11.3") 223 | assert(a.satisfies(new SemVer("11.0.0"))) 224 | assert(a.satisfies(new SemVer("11.9.0"))) 225 | assert(b.satisfies(new SemVer("11.0.0"))) 226 | assert(b.satisfies(new SemVer("11.9.0"))) 227 | assert(b.satisfies(new SemVer("12.9.0"))) 228 | }) 229 | 230 | await test.step(">=0.47<1", () => { 231 | const a = new semver.Range(">=0.47<1") 232 | assertEquals(a.toString(), ">=0.47<1") 233 | assert(a.satisfies(new SemVer("0.47.0"))) 234 | assert(a.satisfies(new SemVer("0.47.9"))) 235 | assert(a.satisfies(new SemVer("0.48.0"))) 236 | assert(a.satisfies(new SemVer("0.80.0"))) 
237 | assertFalse(a.satisfies(new SemVer("1.0.0"))) 238 | }) 239 | 240 | await test.step("^0 string is not @0.0.0", () => { 241 | const a = new semver.Range("^0") 242 | assertEquals(a.toString(), "^0") 243 | 244 | const b = new semver.Range("^0.0") 245 | assertEquals(b.toString(), "~0") //NOTE strictly should be ~0.0 but this is fine 246 | 247 | const c = new semver.Range("^1") 248 | assertEquals(c.toString(), "^1") 249 | 250 | const d = new semver.Range("^1.0") 251 | assertEquals(d.toString(), "^1") 252 | }) 253 | 254 | //FIXME this *should* work 255 | // await test.step("^11,^12…^11.3,^12.2", () => { 256 | // const a = new semver.Range("^11,^12") 257 | // const b = new semver.Range("^11.3") 258 | // const c = semver.intersect(a, b) 259 | // assertEquals(c.toString(), "^11.3,^12.2") 260 | // }) 261 | 262 | /* https://github.com/pkgxdev/libpkgx/issues/42 */ 263 | await test.step(">=1<1.0.19", async test => { 264 | await test.step("1", () => { new semver.Range(">=1<1.0.19") }) 265 | await test.step("2", () => { new semver.Range(">=1.0<1.0.19") }) 266 | await test.step("3", () => { 267 | assertEquals(new semver.Range(">=1<2").toString(), "^1") 268 | }) 269 | 270 | assert(new SemVer("1").lt(new SemVer("1.0.19")), "1.0.0 is less than 1.0.19") 271 | }) 272 | }) 273 | }) 274 | 275 | Deno.test("coverage", () => { 276 | assert(new SemVer("1.2.3").eq(new SemVer([1,2,3]))) 277 | assert(new SemVer("1.2.3").neq(new SemVer([1,2,4]))) 278 | assert(new SemVer("1.2.3").lt(new SemVer([1,2,4]))) 279 | assert(new SemVer("1.2.4").gt(new SemVer([1,2,3]))) 280 | 281 | assertThrows(() => new SemVer("1.q.3")) 282 | 283 | assert(semver.Range.parse("^1")?.satisfies(new SemVer("1.2.3"))) 284 | 285 | assertEquals(new semver.Range("=1.0.0").single(), new SemVer("1.0.0")) 286 | 287 | assertEquals((new semver.Range("^1") as any)[Symbol.for("Deno.customInspect")](), "^1") 288 | assertEquals((new SemVer("1.2.3") as any)[Symbol.for("Deno.customInspect")](), "1.2.3") 289 | 290 | 
assert(semver.parse("a") == undefined) 291 | 292 | assertThrows(() => new semver.Range(">=3<2")) 293 | assertThrows(() => new semver.Range("")) 294 | 295 | assertEquals(new SemVer(new SemVer("1.2.3")), new SemVer("1.2.3")) 296 | assertEquals(new SemVer(new semver.Range("=1.0.0")), new SemVer("1.0.0")) 297 | assertThrows(() => new SemVer(new semver.Range("^1"))) 298 | 299 | assertThrows(() => new semver.Range("1")) 300 | 301 | assertEquals(semver.Range.parse("1")?.toString(), new semver.Range("^1").toString()) 302 | assertEquals(semver.Range.parse("1.1")?.toString(), new semver.Range("~1.1").toString()) 303 | assertEquals(semver.Range.parse("1.1.2")?.toString(), new semver.Range("@1.1.2").toString()) 304 | 305 | assertEquals(semver.Range.parse("a"), undefined) 306 | 307 | assertEquals(new semver.Range("*").toString(), "*") 308 | 309 | assert(new semver.Range("*").satisfies(new SemVer("1.2.3"))) 310 | 311 | assertEquals(new semver.Range("^1").max([new SemVer("1.2.3"), new SemVer("1.2.4")]), new SemVer("1.2.4")) 312 | 313 | assertEquals(new semver.Range("*").single(), undefined) 314 | 315 | assert(semver.intersect(new semver.Range("*"), new semver.Range("^2"))) 316 | assert(semver.intersect(new semver.Range("^2"), new semver.Range("*"))) 317 | 318 | 319 | assertEquals(new semver.Range("^1.2.0").toString(), "^1.2") 320 | }) 321 | -------------------------------------------------------------------------------- /vendor/sqlite3@0.10.0/src/ffi.ts: -------------------------------------------------------------------------------- 1 | const symbols = { 2 | sqlite3_open_v2: { 3 | parameters: [ 4 | "buffer", // const char *filename 5 | "buffer", // sqlite3 **ppDb 6 | "i32", // int flags 7 | "pointer", // const char *zVfs 8 | ], 9 | result: "i32", 10 | }, 11 | 12 | sqlite3_close_v2: { 13 | parameters: [ 14 | "pointer", // sqlite3 *db 15 | ], 16 | result: "i32", 17 | }, 18 | 19 | sqlite3_changes: { 20 | parameters: [ 21 | "pointer", // sqlite3 *db 22 | ], 23 | result: "i32", 24 | 
},
25 | 
26 |   sqlite3_total_changes: {
27 |     parameters: [
28 |       "pointer", // sqlite3 *db
29 |     ],
30 |     result: "i32",
31 |   },
32 | 
// NOTE(review): the C API declares sqlite3_last_insert_rowid() as returning
// sqlite3_int64; "i32" truncates rowids above 2^31-1. Possibly deliberate so
// callers receive a JS number rather than a bigint — TODO confirm against
// src/database.ts before changing, since "i64" would alter the JS return type.
33 |   sqlite3_last_insert_rowid: {
34 |     parameters: [
35 |       "pointer", // sqlite3 *db
36 |     ],
37 |     result: "i32",
38 |   },
39 | 
40 |   sqlite3_get_autocommit: {
41 |     parameters: [
42 |       "pointer", // sqlite3 *db
43 |     ],
44 |     result: "i32",
45 |   },
46 | 
47 |   sqlite3_prepare_v2: {
48 |     parameters: [
49 |       "pointer", // sqlite3 *db
50 |       "buffer", // const char *zSql
51 |       "i32", // int nByte
52 |       "buffer", // sqlite3_stmt **ppStmt
53 |       "pointer", // const char **pzTail
54 |     ],
55 |     result: "i32",
56 |   },
57 | 
58 |   sqlite3_reset: {
59 |     parameters: [
60 |       "pointer", // sqlite3_stmt *pStmt
61 |     ],
62 |     result: "i32",
63 |   },
64 | 
65 |   sqlite3_clear_bindings: {
66 |     parameters: [
67 |       "pointer", // sqlite3_stmt *pStmt
68 |     ],
69 |     result: "i32",
70 |   },
71 | 
72 |   sqlite3_step: {
73 |     parameters: [
74 |       "pointer", // sqlite3_stmt *pStmt
75 |     ],
76 |     result: "i32",
77 |   },
78 | 
79 |   sqlite3_column_count: {
80 |     parameters: [
81 |       "pointer", // sqlite3_stmt *pStmt
82 |     ],
83 |     result: "i32",
84 |   },
85 | 
86 |   sqlite3_column_type: {
87 |     parameters: [
88 |       "pointer", // sqlite3_stmt *pStmt
89 |       "i32", // int iCol
90 |     ],
91 |     result: "i32",
92 |   },
93 | 
94 |   sqlite3_column_text: {
95 |     parameters: [
96 |       "pointer", // sqlite3_stmt *pStmt
97 |       "i32", // int iCol
98 |     ],
99 |     result: "pointer",
100 |   },
101 |   sqlite3_column_value: {
102 |     parameters: [
103 |       "pointer", // sqlite3_stmt *pStmt
104 |       "i32", // int iCol
105 |     ],
106 |     result: "pointer",
107 |   },
108 | 
109 |   sqlite3_finalize: {
110 |     parameters: [
111 |       "pointer", // sqlite3_stmt *pStmt
112 |     ],
113 |     result: "i32",
114 |   },
115 | 
116 |   sqlite3_exec: {
117 |     parameters: [
118 |       "pointer", // sqlite3 *db
119 |       "buffer", // const char *sql
120 |       "pointer", // sqlite3_callback callback
121 |       "pointer", // void *arg
122 |       "buffer", // char **errmsg
123 |     ],
124 |     result: "i32",
125 |   },
126 | 
127 |   
sqlite3_free: {
128 |     parameters: [
129 |       "pointer", // void *p
130 |     ],
131 |     result: "void",
132 |   },
133 | 
134 |   sqlite3_column_int: {
135 |     parameters: [
136 |       "pointer", // sqlite3_stmt *pStmt
137 |       "i32", // int iCol
138 |     ],
139 |     result: "i32",
140 |   },
141 | 
142 |   sqlite3_column_double: {
143 |     parameters: [
144 |       "pointer", // sqlite3_stmt *pStmt
145 |       "i32", // int iCol
146 |     ],
147 |     result: "f64",
148 |   },
149 | 
150 |   sqlite3_column_blob: {
151 |     parameters: [
152 |       "pointer", // sqlite3_stmt *pStmt
153 |       "i32", // int iCol
154 |     ],
155 |     result: "pointer",
156 |   },
157 | 
158 |   sqlite3_column_bytes: {
159 |     parameters: [
160 |       "pointer", // sqlite3_stmt *pStmt
161 |       "i32", // int iCol
162 |     ],
163 |     result: "i32",
164 |   },
165 | 
166 |   sqlite3_column_name: {
167 |     parameters: [
168 |       "pointer", // sqlite3_stmt *pStmt
169 |       "i32", // int iCol
170 |     ],
171 |     result: "pointer",
172 |   },
173 | 
// NOTE(review): the C API's sqlite3_column_decltype returns const char*; every
// other string-returning symbol in this table uses "pointer", but this one is
// "u64" (pointer-width integer). Looks inconsistent — verify how (or whether)
// src/statement.ts consumes this before changing. TODO confirm.
174 |   sqlite3_column_decltype: {
175 |     parameters: [
176 |       "pointer", // sqlite3_stmt *pStmt
177 |       "i32", // int iCol
178 |     ],
179 |     result: "u64",
180 |   },
181 | 
182 |   sqlite3_bind_parameter_index: {
183 |     parameters: [
184 |       "pointer", // sqlite3_stmt *pStmt
185 |       "buffer", // const char *zName
186 |     ],
187 |     result: "i32",
188 |   },
189 | 
190 |   sqlite3_bind_text: {
191 |     parameters: [
192 |       "pointer", // sqlite3_stmt *pStmt
193 |       "i32", // int iCol
194 |       "buffer", // const char *zData
195 |       "i32", // int nData
196 |       "pointer", // void (*xDel)(void*)
197 |     ],
198 |     result: "i32",
199 |   },
200 | 
201 |   sqlite3_bind_blob: {
202 |     parameters: [
203 |       "pointer", // sqlite3_stmt *pStmt
204 |       "i32", // int iCol
205 |       "buffer", // const void *zData
206 |       "i32", // int nData
207 |       "pointer", // void (*xDel)(void*)
208 |     ],
209 |     result: "i32",
210 |   },
211 | 
212 |   sqlite3_bind_double: {
213 |     parameters: [
214 |       "pointer", // sqlite3_stmt *pStmt
215 |       "i32", // int iCol
216 |       "f64", // double rValue
217 |     ],
218 |     result: "i32",
219 |   },
220 | 
221 |   sqlite3_bind_int: {
222 |     
parameters: [ 223 | "pointer", // sqlite3_stmt *pStmt 224 | "i32", // int iCol 225 | "i32", // int iValue 226 | ], 227 | result: "i32", 228 | }, 229 | 230 | sqlite3_bind_int64: { 231 | parameters: [ 232 | "pointer", // sqlite3_stmt *pStmt 233 | "i32", // int iCol 234 | "i64", // i64 iValue 235 | ], 236 | result: "i32", 237 | }, 238 | 239 | sqlite3_bind_null: { 240 | parameters: [ 241 | "pointer", // sqlite3_stmt *pStmt 242 | "i32", // int iCol 243 | ], 244 | result: "i32", 245 | }, 246 | 247 | sqlite3_expanded_sql: { 248 | parameters: [ 249 | "pointer", // sqlite3_stmt *pStmt 250 | ], 251 | result: "pointer", 252 | }, 253 | 254 | sqlite3_bind_parameter_count: { 255 | parameters: [ 256 | "pointer", // sqlite3_stmt *pStmt 257 | ], 258 | result: "i32", 259 | }, 260 | 261 | sqlite3_complete: { 262 | parameters: [ 263 | "buffer", // const char *sql 264 | ], 265 | result: "i32", 266 | }, 267 | 268 | sqlite3_sourceid: { 269 | parameters: [], 270 | result: "pointer", 271 | }, 272 | 273 | sqlite3_libversion: { 274 | parameters: [], 275 | result: "pointer", 276 | }, 277 | 278 | sqlite3_blob_open: { 279 | parameters: [ 280 | "pointer", /* sqlite3 *db */ 281 | "buffer", /* const char *zDb */ 282 | "buffer", /* const char *zTable */ 283 | "buffer", /* const char *zColumn */ 284 | "i64", /* sqlite3_int64 iRow */ 285 | "i32", /* int flags */ 286 | "buffer", /* sqlite3_blob **ppBlob */ 287 | ], 288 | result: "i32", 289 | }, 290 | 291 | sqlite3_blob_read: { 292 | parameters: [ 293 | "pointer", /* sqlite3_blob *blob */ 294 | "buffer", /* void *Z */ 295 | "i32", /* int N */ 296 | "i32", /* int iOffset */ 297 | ], 298 | result: "i32", 299 | }, 300 | 301 | sqlite3_blob_write: { 302 | parameters: [ 303 | "pointer", /* sqlite3_blob *blob */ 304 | "buffer", /* const void *z */ 305 | "i32", /* int n */ 306 | "i32", /* int iOffset */ 307 | ], 308 | result: "i32", 309 | }, 310 | 311 | sqlite3_blob_bytes: { 312 | parameters: ["pointer" /* sqlite3_blob *blob */], 313 | result: "i32", 314 | }, 
315 | 316 | sqlite3_blob_close: { 317 | parameters: ["pointer" /* sqlite3_blob *blob */], 318 | result: "i32", 319 | }, 320 | 321 | sqlite3_sql: { 322 | parameters: [ 323 | "pointer", // sqlite3_stmt *pStmt 324 | ], 325 | result: "pointer", 326 | }, 327 | 328 | sqlite3_stmt_readonly: { 329 | parameters: [ 330 | "pointer", // sqlite3_stmt *pStmt 331 | ], 332 | result: "i32", 333 | }, 334 | 335 | sqlite3_bind_parameter_name: { 336 | parameters: [ 337 | "pointer", // sqlite3_stmt *pStmt 338 | "i32", // int iCol 339 | ], 340 | result: "pointer", 341 | }, 342 | 343 | sqlite3_errcode: { 344 | parameters: [ 345 | "pointer", // sqlite3 *db 346 | ], 347 | result: "i32", 348 | }, 349 | 350 | sqlite3_errmsg: { 351 | parameters: [ 352 | "pointer", // sqlite3 *db 353 | ], 354 | result: "pointer", 355 | }, 356 | 357 | sqlite3_errstr: { 358 | parameters: [ 359 | "i32", // int rc 360 | ], 361 | result: "pointer", 362 | }, 363 | 364 | sqlite3_column_int64: { 365 | parameters: [ 366 | "pointer", // sqlite3_stmt *pStmt 367 | "i32", // int iCol 368 | ], 369 | result: "i64", 370 | }, 371 | 372 | sqlite3_backup_init: { 373 | parameters: [ 374 | "pointer", // sqlite3 *pDest 375 | "buffer", // const char *zDestName 376 | "pointer", // sqlite3 *pSource 377 | "buffer", // const char *zSourceName 378 | ], 379 | result: "pointer", 380 | }, 381 | 382 | sqlite3_backup_step: { 383 | parameters: [ 384 | "pointer", // sqlite3_backup *p 385 | "i32", // int nPage 386 | ], 387 | result: "i32", 388 | }, 389 | 390 | sqlite3_backup_finish: { 391 | parameters: [ 392 | "pointer", // sqlite3_backup *p 393 | ], 394 | result: "i32", 395 | }, 396 | 397 | sqlite3_backup_remaining: { 398 | parameters: [ 399 | "pointer", // sqlite3_backup *p 400 | ], 401 | result: "i32", 402 | }, 403 | 404 | sqlite3_backup_pagecount: { 405 | parameters: [ 406 | "pointer", // sqlite3_backup *p 407 | ], 408 | result: "i32", 409 | }, 410 | 411 | sqlite3_create_function: { 412 | parameters: [ 413 | "pointer", // sqlite3 *db 414 | 
"buffer", // const char *zFunctionName 415 | "i32", // int nArg 416 | "i32", // int eTextRep 417 | "pointer", // void *pApp 418 | "pointer", // void (*xFunc)(sqlite3_context*,int,sqlite3_value**) 419 | "pointer", // void (*xStep)(sqlite3_context*,int,sqlite3_value**) 420 | "pointer", // void (*xFinal)(sqlite3_context*) 421 | ], 422 | result: "i32", 423 | }, 424 | 425 | sqlite3_result_blob: { 426 | parameters: [ 427 | "pointer", // sqlite3_context *p 428 | "buffer", // const void *z 429 | "i32", // int n 430 | "isize", // void (*xDel)(void*) 431 | ], 432 | result: "void", 433 | }, 434 | 435 | sqlite3_result_double: { 436 | parameters: [ 437 | "pointer", // sqlite3_context *p 438 | "f64", // double rVal 439 | ], 440 | result: "void", 441 | }, 442 | 443 | sqlite3_result_error: { 444 | parameters: [ 445 | "pointer", // sqlite3_context *p 446 | "buffer", // const char *z 447 | "i32", // int n 448 | ], 449 | result: "void", 450 | }, 451 | 452 | sqlite3_result_int: { 453 | parameters: [ 454 | "pointer", // sqlite3_context *p 455 | "i32", // int iVal 456 | ], 457 | result: "void", 458 | }, 459 | 460 | sqlite3_result_int64: { 461 | parameters: [ 462 | "pointer", // sqlite3_context *p 463 | "i64", // sqlite3_int64 iVal 464 | ], 465 | result: "void", 466 | }, 467 | 468 | sqlite3_result_null: { 469 | parameters: [ 470 | "pointer", // sqlite3_context *p 471 | ], 472 | result: "void", 473 | }, 474 | 475 | sqlite3_result_text: { 476 | parameters: [ 477 | "pointer", // sqlite3_context *p 478 | "buffer", // const char *z 479 | "i32", // int n 480 | "isize", // void (*xDel)(void*) 481 | ], 482 | result: "void", 483 | }, 484 | 485 | sqlite3_value_type: { 486 | parameters: [ 487 | "pointer", // sqlite3_value *pVal 488 | ], 489 | result: "i32", 490 | }, 491 | sqlite3_value_subtype: { 492 | parameters: [ 493 | "pointer", // sqlite3_value *pVal 494 | ], 495 | result: "i32", 496 | }, 497 | 498 | sqlite3_value_blob: { 499 | parameters: [ 500 | "pointer", // sqlite3_value *pVal 501 | ], 502 
| result: "pointer", 503 | }, 504 | 505 | sqlite3_value_double: { 506 | parameters: [ 507 | "pointer", // sqlite3_value *pVal 508 | ], 509 | result: "f64", 510 | }, 511 | 512 | sqlite3_value_int: { 513 | parameters: [ 514 | "pointer", // sqlite3_value *pVal 515 | ], 516 | result: "i32", 517 | }, 518 | 519 | sqlite3_value_int64: { 520 | parameters: [ 521 | "pointer", // sqlite3_value *pVal 522 | ], 523 | result: "i64", 524 | }, 525 | 526 | sqlite3_value_text: { 527 | parameters: [ 528 | "pointer", // sqlite3_value *pVal 529 | ], 530 | result: "pointer", 531 | }, 532 | 533 | sqlite3_value_bytes: { 534 | parameters: [ 535 | "pointer", // sqlite3_value *pVal 536 | ], 537 | result: "i32", 538 | }, 539 | 540 | sqlite3_aggregate_context: { 541 | parameters: [ 542 | "pointer", // sqlite3_context *p 543 | "i32", // int nBytes 544 | ], 545 | result: "pointer", 546 | }, 547 | 548 | sqlite3_enable_load_extension: { 549 | parameters: [ 550 | "pointer", // sqlite3 *db 551 | "i32", // int onoff 552 | ], 553 | result: "i32", 554 | }, 555 | 556 | sqlite3_load_extension: { 557 | parameters: [ 558 | "pointer", // sqlite3 *db 559 | "buffer", // const char *zFile 560 | "buffer", // const char *zProc 561 | "buffer", // const char **pzErrMsg 562 | ], 563 | result: "i32", 564 | }, 565 | 566 | sqlite3_initialize: { 567 | parameters: [], 568 | result: "i32", 569 | }, 570 | } as const satisfies Deno.ForeignLibraryInterface; 571 | 572 | let lib: Deno.DynamicLibrary["symbols"]; 573 | 574 | export default function(path?: string) { 575 | if (!lib) { 576 | lib = Deno.dlopen(path!, symbols).symbols; 577 | const init = lib.sqlite3_initialize(); 578 | if (init !== 0) { 579 | throw new Error(`Failed to initialize SQLite3: ${init}`); 580 | } 581 | } 582 | return lib 583 | } 584 | --------------------------------------------------------------------------------