├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── build-integration-aws.yml │ ├── build-integration-local.yml │ └── build-synapse.yml ├── .gitignore ├── LICENSE ├── README.md ├── blog └── creating-synapse.md ├── docs ├── contributing.md ├── custom-resources.md ├── environments.md ├── faq.md ├── getting-started.md ├── native-modules.md ├── packages.md ├── providers.md └── testing.md ├── examples ├── sdk-and-cli │ ├── README.md │ ├── cli │ │ ├── main.ts │ │ └── package.json │ └── sdk │ │ ├── main.ts │ │ └── package.json ├── websites │ ├── preact-bucket │ │ ├── app.tsx │ │ ├── package.json │ │ └── tsconfig.json │ └── react-bucket │ │ ├── app.tsx │ │ ├── package.json │ │ └── tsconfig.json └── workflows │ ├── README.md │ ├── sdk │ ├── main.ts │ └── package.json │ └── simple │ ├── main.ts │ └── package.json ├── integrations ├── aws │ ├── package.json │ ├── src │ │ ├── index.ts │ │ ├── permissions.ts │ │ ├── services │ │ │ ├── api-gateway.ts │ │ │ ├── certificate-manager.ts │ │ │ ├── clients.ts │ │ │ ├── cloudfront.ts │ │ │ ├── cloudwatch-logs.ts │ │ │ ├── dynamodb.ts │ │ │ ├── ec2.ts │ │ │ ├── ecr.ts │ │ │ ├── ecs.ts │ │ │ ├── event-bridge.ts │ │ │ ├── iam.ts │ │ │ ├── kinesis.ts │ │ │ ├── lambda-url.ts │ │ │ ├── lambda.ts │ │ │ ├── organizations.ts │ │ │ ├── route53.ts │ │ │ ├── s3.ts │ │ │ ├── secrets-manager.ts │ │ │ └── sqs.ts │ │ ├── sigv4.ts │ │ └── utils.ts │ └── tsconfig.json ├── local │ ├── package.json │ ├── src │ │ ├── bucket.ts │ │ ├── cdn.ts │ │ ├── counter.ts │ │ ├── function.ts │ │ ├── gateway.ts │ │ ├── index.ts │ │ ├── provider.ts │ │ ├── queue.ts │ │ └── table.ts │ └── tsconfig.json ├── preact │ ├── main.ts │ ├── package.json │ └── tsconfig.json ├── react │ ├── main.ts │ ├── package.json │ └── tsconfig.json └── websites │ ├── package.json │ ├── src │ ├── hooks.ts │ ├── host.ts │ └── runtime.ts │ └── tsconfig.json ├── package.json ├── packages ├── auth.tgz ├── quotes.tgz └── resources.tgz ├── src ├── artifacts.ts ├── auth │ └── index.ts ├── backendClient.ts ├── build-fs │ ├── backup.ts │ ├── block.ts │ ├── gc.ts │ ├── gcWorker.ts │ ├── pointers.ts │ ├── remote.ts │ ├── stats.ts │ └── utils.ts ├── build │ ├── builder.ts │ ├── go.ts │ ├── sea.ts │ └── sources.ts ├── bundler.ts ├── cli │ ├── buildInternal.ts │ ├── commands.ts │ ├── completions │ │ ├── completion.sh │ │ └── completion.ts │ ├── config.ts │ ├── daemon.ts │ ├── index.ts │ ├── install.ps1 │ ├── install.sh │ ├── install.ts │ ├── logger.ts │ ├── ui.ts │ ├── updater.ts │ └── views │ │ ├── compile.ts │ │ ├── deploy.ts │ │ ├── install.ts │ │ └── test.ts ├── closures.ts ├── codegen │ ├── openapiHelpers.ts │ ├── openapiv3.ts │ ├── providers.ts │ └── schemas.ts ├── compiler │ ├── classifier.ts │ ├── config.ts │ ├── declarations.ts │ ├── diagnostics.ts │ ├── entrypoints.ts │ ├── esm.ts │ ├── host.ts │ ├── incremental.ts │ ├── programBuilder.ts │ ├── resourceGraph.ts │ ├── transformer.ts │ └── validation.ts ├── deploy │ ├── deployment.ts │ ├── httpServer.ts │ ├── registry.ts │ ├── server.ts │ ├── session.ts │ └── state.ts ├── deserializer.ts ├── events.ts ├── execution.ts ├── git.ts ├── index.ts ├── loader.ts ├── logging.ts ├── optimizer.ts ├── perf │ └── profiles.ts ├── permissions.ts ├── pm │ ├── attestations.ts │ ├── autoInstall.ts │ ├── compat.ts │ ├── integrity.ts │ ├── manifests.ts │ ├── packageJson.ts │ ├── packages.ts │ ├── publish.ts │ ├── repos │ │ ├── github.ts │ │ └── spr.ts │ ├── tools.ts │ └── versions.ts ├── refactoring.ts ├── repl.ts ├── runtime │ ├── engine.ts │ ├── 
env.ts │ ├── importMaps.ts │ ├── loader.ts │ ├── lookup.ts │ ├── modules │ │ ├── core.ts │ │ ├── http.ts │ │ ├── key-service.ts │ │ ├── lib.ts │ │ ├── reify.ts │ │ ├── serdes.ts │ │ ├── services.ts │ │ ├── terraform.ts │ │ ├── test.ts │ │ ├── validation.ts │ │ └── ws.ts │ ├── nodeLoader.ts │ ├── resolver.ts │ ├── rootLoader.ts │ ├── sourceMaps.ts │ ├── srl │ │ ├── README.md │ │ ├── compute │ │ │ └── index.ts │ │ ├── index.ts │ │ ├── net │ │ │ └── index.ts │ │ ├── storage │ │ │ └── index.ts │ │ └── websites.ts │ └── utils.ts ├── services │ ├── analytics │ │ ├── backend.ts │ │ ├── daemon.ts │ │ ├── deviceId.ts │ │ └── index.ts │ ├── logs.ts │ └── secrets │ │ ├── index.ts │ │ └── inmem.ts ├── static-solver │ ├── compiler.ts │ ├── index.ts │ ├── scopes.ts │ ├── solver.ts │ └── utils.ts ├── system.ts ├── templates.ts ├── testing │ ├── index.ts │ └── internal.ts ├── utils.ts ├── utils │ ├── caches.ts │ ├── convertNodePrimordials.ts │ ├── diffs.ts │ ├── github.ts │ ├── glob.ts │ ├── http.ts │ ├── memento.ts │ ├── process.ts │ ├── stateless-watcher │ │ ├── binding.ts │ │ ├── main.zig │ │ ├── mem.zig │ │ ├── static_string_map.zig │ │ ├── thread_pool.zig │ │ └── watcher.ts │ └── tar.ts ├── workspaces.ts └── zig │ ├── ast.ts │ ├── ast.zig │ ├── compile.ts │ ├── fs-ext.ts │ ├── fs-ext.zig │ ├── gccHeaders.ts │ ├── installer.ts │ ├── lib │ ├── js.zig │ └── mem.zig │ ├── util.ts │ ├── util.zig │ └── win32 │ ├── load-hook.zig │ └── shim.zig ├── test ├── conformance │ ├── bucket.ts │ ├── function.ts │ ├── httpService.ts │ ├── queue.ts │ ├── table.ts │ └── util.ts └── fixtures │ ├── branches │ └── isolation │ │ └── main.ts │ ├── deploy │ ├── added-file-nested │ │ └── main.ts │ ├── added-file-nested2 │ │ └── main.ts │ ├── added-file-nested3 │ │ └── main.ts │ ├── added-file │ │ └── main.ts │ ├── added-file2 │ │ ├── main.ts │ │ └── tsconfig.json │ ├── added-file3 │ │ ├── main.ts │ │ └── tsconfig.json │ ├── removed-file │ │ └── main.ts │ ├── transitive-side-effect │ │ ├── b.ts │ │ └── main.ts │ └── transitive-side-effect2 │ │ ├── b.ts │ │ └── main.ts │ ├── env │ └── main.ts │ ├── pm │ ├── pkg-archive │ │ ├── main.ts │ │ ├── package.json │ │ └── pkg-b │ │ │ ├── main.ts │ │ │ └── package.json │ ├── provider-version │ │ ├── main.ts │ │ └── package.json │ └── unofficial-provider │ │ ├── main.ts │ │ └── package.json │ ├── run │ ├── added-file-nested │ │ └── main.ts │ ├── added-file │ │ └── main.ts │ ├── auto-install │ │ ├── main.ts │ │ └── package.json │ ├── main-fn │ │ └── main.ts │ ├── prompt-deploy │ │ └── main.ts │ ├── two-mains │ │ ├── main.ts │ │ ├── one.ts │ │ └── two.ts │ └── variations │ │ ├── folder │ │ └── hello.ts │ │ └── main.ts │ ├── source-maps │ ├── compile-time.ts │ ├── deploy-time.ts │ └── main.ts │ ├── status │ └── opt │ │ └── main.ts │ ├── synth │ ├── assets │ │ ├── main.ts │ │ └── my-data.json │ ├── cache-indirect │ │ └── main.ts │ ├── capture │ │ └── main.ts │ ├── classes │ │ └── main.ts │ ├── conditional-test │ │ └── main.ts │ ├── default-provider │ │ ├── main.ts │ │ └── package.json │ ├── deployed-module │ │ ├── main.ts │ │ ├── package.json │ │ └── pkg │ │ │ ├── main.ts │ │ │ └── package.json │ ├── fn-bind │ │ ├── main.ts │ │ └── symEval.ts │ ├── js-symbols │ │ └── main.ts │ ├── let-bindings │ │ └── main.ts │ ├── react-jsx │ │ ├── main.tsx │ │ └── tsconfig.json │ ├── runtime-transforms │ │ ├── callables-exported.ts │ │ ├── callables.ts │ │ └── main.ts │ ├── stub-when-bundled │ │ └── main.ts │ └── test-hooks │ │ └── main.ts │ ├── test │ ├── cache-indirect │ │ └── main.ts │ ├── caching │ │ └── 
main.ts │ ├── hooks │ │ └── main.ts │ ├── removed-file │ │ └── main.ts │ └── rollback-if-failed │ │ └── main.ts │ └── zig-modules │ ├── classes │ ├── adder.zig │ ├── main.ts │ └── tsconfig.json │ ├── host-arch │ ├── add.zig │ ├── main.ts │ └── tsconfig.json │ ├── multiple-files │ ├── main.ts │ ├── one.zig │ ├── shared.zig │ ├── tsconfig.json │ └── two.zig │ ├── promises │ ├── main.ts │ ├── mod.zig │ └── tsconfig.json │ ├── simple-outdir │ ├── add.zig │ ├── main.ts │ └── tsconfig.json │ ├── simple │ ├── add.zig │ ├── main.ts │ └── tsconfig.json │ ├── strings │ ├── main.ts │ ├── mod.zig │ └── tsconfig.json │ └── types │ ├── expected.d.ts │ ├── main.ts │ ├── mod.zig │ └── tsconfig.json └── tsconfig.json /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ### Problem 13 | 31 | 32 | ### System Info 33 | 34 | 35 | 38 | 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | 13 | 14 | 17 | 25 | 26 | 27 | ### Suggestion 28 | 36 | 37 | 40 | -------------------------------------------------------------------------------- /.github/workflows/build-integration-aws.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | paths: 6 | - "!.github/**" 7 | - .github/workflows/build-integration-aws.yml 8 | - integrations/aws/** 9 | 10 | jobs: 11 | run_test: 12 | runs-on: 13 | - ubuntu-latest 14 | permissions: 15 | id-token: write 16 | contents: read 17 | steps: 18 | - uses: actions/checkout@v3 19 | - name: Configure AWS credentials 20 | uses: aws-actions/configure-aws-credentials@v2 21 | with: 22 | aws-region: us-west-2 23 | role-to-assume: ${{ vars.TEST_IAM_ROLE }} 24 | - run: curl -fsSL https://synap.sh/install | bash 25 | - run: echo "SYNAPSE_INSTALL=$HOME/.synapse" >> "$GITHUB_ENV" 26 | - run: echo "${SYNAPSE_INSTALL}/bin" >> "$GITHUB_PATH" 27 | - run: synapse --version 28 | - run: cd integrations/aws && synapse compile && synapse publish --local 29 | name: Build Integration 30 | - run: cd test/conformance && synapse compile --target aws 31 | name: Compile Test Suite 32 | - run: cd test/conformance && synapse test 33 | name: Run Tests 34 | - if: always() 35 | run: cd test/conformance && synapse destroy 36 | name: Clean-up 37 | -------------------------------------------------------------------------------- /.github/workflows/build-integration-local.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | branches: 4 | - main 5 | paths: 6 | - "!.github/**" 7 | - .github/workflows/build-integration-local.yml 8 | - integrations/local/** 9 | 10 | jobs: 11 | run_test: 12 | runs-on: 13 | - ubuntu-latest 14 | permissions: 15 | id-token: write 16 | contents: read 17 | steps: 18 | - uses: Cohesible/get-credentials-action@670287aebd309e1890507ab8ee7c8ed7eefa4c10 19 | - uses: actions/checkout@v3 20 | - run: curl -fsSL https://synap.sh/install | bash 21 | - run: synapse --version 22 | - run: cd integrations/local && synapse compile && synapse publish --local 23 | 
name: Build 24 | - run: cd test/conformance && synapse compile && synapse test && synapse destroy 25 | name: Test 26 | -------------------------------------------------------------------------------- /.github/workflows/build-synapse.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | paths: 6 | - "!.github/**" 7 | - .github/workflows/build-synapse.yml 8 | - src/** 9 | - test/** 10 | pull_request: 11 | branches: 12 | - main 13 | paths: 14 | - "!.github/**" 15 | - .github/workflows/build-synapse.yml 16 | - src/** 17 | - test/** 18 | 19 | jobs: 20 | run_test: 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | include: 25 | - os: macos-13 26 | - os: macos-14 27 | - os: ubuntu-latest 28 | - os: windows-2022 29 | runs-on: ${{ matrix.os }} 30 | permissions: 31 | contents: read 32 | steps: 33 | - uses: actions/checkout@v3 34 | 35 | - run: curl -fsSL https://synap.sh/install | bash 36 | if: matrix.os != 'windows-2022' 37 | - run: irm https://synap.sh/install.ps1 | iex 38 | if: matrix.os == 'windows-2022' 39 | - run: synapse --version 40 | 41 | - run: synapse compile 42 | - run: synapse run testFixtures 43 | if: matrix.os != 'windows-2022' # TODO: need bash to run the fixture tests 44 | - run: synapse build 45 | - run: ./dist/bin/synapse 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | out 2 | dist 3 | node_modules 4 | **/.env 5 | **/.env.* 6 | **/.DS_Store 7 | *.d.zig.ts 8 | internal 9 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | ## Prerequisites 2 | You'll need the latest version of Synapse installed. 3 | 4 | ## Synapse CLI 5 | The bulk of Synapse lives in `src`. Use `synapse compile` in the root of the repository to build. There are a fast way and a slow way to test your changes. The fast way requires a little bit of setup but results in quicker iterations. The slow way creates an executable which is most similar to the release build of Synapse. 6 | 7 | ### Slow Way 8 | 9 | Run `synapse compile` + `synapse build`. The executable will be placed in `dist/bin` e.g. `dist/bin/synapse` for Linux/macOS and `dist/bin/synapse.exe` for Windows. 10 | 11 | ### Fast Way 12 | 13 | This is currently unreliable. Use the slow way for now. 14 | 15 | 40 | 41 | ## Integrations (aka compiler backends) 42 | 43 | Synapse uses a plugin-like architecture for loading deployment target implementations. Packages can contribute implementations by using the `addTarget` function from `synapse:core`. See [this file](../integrations/local/src/function.ts) for a reasonably simple example. 44 | 45 | Integrations are packages within this repository found under the `integrations` directory. The directory name matches the target e.g. `integrations/local` for `synapse compile --target local`. Future improvements may allow for substituting these built-in references. 46 | 47 | ### Building and Testing 48 | 49 | Each integration is built with `synapse compile`. Use `synapse publish --local` to use the locally built package for future compilations. 50 | 51 | Currently there are no integration-specific tests. Integrations are validated against `synapse:srl/*` by running the [conformance suite](../test/conformance).
You can build and run this suite by running the following from the repository root: 52 | 53 | ```shell 54 | cd test/conformance && synapse compile && synapse test 55 | ``` 56 | 57 | This will use the `local` target by default. You can change the target by adding it to `synapse compile`: 58 | 59 | ```shell 60 | cd test/conformance && synapse compile --target aws && synapse test 61 | ``` 62 | 63 | ## Documentation 64 | 65 | ### Commands 66 | 67 | Output from `synapse help` is generated from command declarations [here](../src/cli/commands.ts). 68 | 69 | The easiest way to find the command you want to edit is to search for the command name in single quotes e.g. `'compile'`. 70 | 71 | ### Everything else 72 | 73 | All other documentation is hand-written and lives in the [docs](../docs) directory. These files will eventually be used as content for a documentation site. 74 | 75 | ## The `packages` directory 76 | 77 | The tarballs in this directory contain services (or rather, service stubs) that may eventually be used in either Synapse directly, or possibly a separate CLI entirely. These are currently closed-source for two main reasons: 78 | 79 | 1. It's difficult to open-source _live_ services (but Synapse _will_ make it easier!) 80 | 2. They may become a part of a strategy to help fund the development of Synapse 81 | 82 | Right now only "quotes" does anything. The rest are stubs and will fail if used. -------------------------------------------------------------------------------- /docs/environments.md: -------------------------------------------------------------------------------- 1 | ## Overview 2 | 3 | Environments allow you to create multiple deployments from the same code. These deployments are _isolated_ from one another, meaning you can test your changes in a safe way before shipping them out. 4 | 5 | ### Switching Environments 6 | 7 | The environment variable `SYNAPSE_ENV` is used to switch environments. For example, I might enter the `beta` environment like so: 8 | 9 | ```shell 10 | export SYNAPSE_ENV=beta 11 | synapse status 12 | # env: beta 13 | ``` 14 | 15 | Environment names are not special, with two exceptions: 16 | * `local` is the default environment. Deployments created in this environment are assumed to be isolated to a single user/machine. 17 | * Names that contain the word `production` are automatically considered a production environment. Production environments have additional safeguards in place: 18 | * Prevents replacement of stateful resources whenever possible 19 | * `synapse destroy` requires a confirmation 20 | 21 | ### Configuration 22 | 23 | In theory, environments should be _exact_ copies of one another. In practice, this often isn't the case. 24 | 25 | The current support for per-environment configuration is limited to `.env` files. Synapse automatically reads environment variables from a `.env.${envName}` file in the working directory. For example, `.env.beta` would be used when `SYNAPSE_ENV` is set to `beta`. You can then write code that uses `process.env` to change configuration. 26 | 27 | Here are some common scenarios where you might want to tweak things slightly per-environment: 28 | * Cost optimization e.g. smaller host machines in developer/test environments, less frequent backups, etc.
29 | * Enabling debug options/tools in non-prod, disabling them in prod 30 | * External dependencies like API keys or DB URLs 31 | * These _should_ be converted to [resources](./custom-resources.md) whenever possible 32 | 33 | In general, the differences between non-prod and prod should be _subtractive_. That is, prod potentially executes _less_ code than non-prod, never more. The code would never be tested otherwise! 34 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | ## I don't need to deploy to the cloud, can Synapse still help me? 2 | Yes! The technology behind Synapse can be used for far more than just cloud infrastructure. 3 | 4 | You can think of Synapse as a build system that is also a part of your application (but only at build time!). This allows for all sorts of things: 5 | * Bundling closures 6 | * Automatically downloading/building/generating dependencies 7 | * Dead-code elimination (more than ES module tree-shaking) 8 | * Customizable builds similar to C's `#if` directive 9 | 10 | And many more things that haven't been released yet! 11 | 12 | -------------------------------------------------------------------------------- /docs/native-modules.md: -------------------------------------------------------------------------------- 1 | ## Overview 2 | 3 | Native modules are any non-JavaScript code that is callable from within JavaScript. This is sometimes done because: 4 | * Direct access to syscalls is needed 5 | * Code re-use (many libraries are written in C/C++) 6 | * Well-crafted code in a systems language will generally be more performant than JavaScript 7 | 8 | Synapse has first-class support for native modules using the [Zig](https://ziglang.org/) programming language. The integration currently requires pinning to a specific Zig version (0.13.0), which is automatically downloaded as needed. 9 | 10 | **`allowArbitraryExtensions` must be enabled in `tsconfig.json` to use native modules.** This requirement may be removed in a future release. Here's a minimal `tsconfig.json` file for reference: 11 | ```json 12 | { 13 | "compilerOptions": { 14 | "allowArbitraryExtensions": true 15 | } 16 | } 17 | ``` 18 | 19 | `*.d.zig.ts` files are automatically generated for imported Zig modules. Adding this pattern to `.gitignore` is recommended. 20 | 21 | ## Basic usage 22 | 23 | When working with many primitive types, things "just work". Simply treat Zig files as if they were any other module, making sure to import them with the `.zig` suffix: 24 | 25 | ```main.ts 26 | import { add } from './add.zig' 27 | 28 | export function main() { 29 | console.log(add(2, 2)) 30 | } 31 | ``` 32 | 33 | ```add.zig 34 | pub fn add(a: u32, b: u32) u32 { 35 | return a + b; 36 | } 37 | ``` 38 | 39 | ## The `js` module 40 | 41 | Many things we take for granted in JavaScript (Promises, strings, arrays) do not translate so easily to Zig. The `js` module is shipped with Synapse and provides APIs for working with the JavaScript runtime. 42 | 43 | 44 | ### Strings 45 | 46 | Modern JavaScript engines (V8, JSC, etc.) have various optimizations for strings that make it difficult to use them directly from native code.
We can simplify things by copying the string into an owned buffer with `js.UTF8String`: 47 | 48 | ```fs.zig 49 | const js = @import("js"); 50 | const std = @import("std"); 51 | 52 | pub fn openSync(src: js.UTF8String) !i32 { 53 | const file = try std.fs.openFileAbsolute(src.data, .{}); 54 | 55 | return file.handle; 56 | } 57 | ``` 58 | 59 | ```main.ts 60 | import { openSync } from './fs.zig' 61 | 62 | export function main() { 63 | console.log(openSync('fs.zig')) 64 | } 65 | ``` 66 | 67 | Parameters typed as `[:0]u8` are also treated as strings. 68 | 69 | ### Promises 70 | 71 | We can declare an exported function as "async" by returning a `js.Promise`: 72 | 73 | ```add.zig 74 | const js = @import("js"); 75 | 76 | pub fn addAsync(a: u32, b: u32) js.Promise(u32) { 77 | return .{ a + b }; 78 | } 79 | ``` 80 | 81 | ```main.ts 82 | import { addAsync } from './add.zig' 83 | 84 | export async function main() { 85 | console.log(await addAsync(2, 2)) 86 | } 87 | ``` 88 | 89 | Note that `js.Promise` changes how the Zig function is called from JavaScript by running the function in a thread pool. Zig functions calling Zig functions that return `js.Promise` will appear synchronous. 90 | 91 | ### Errors 92 | 93 | Errors returned by a Zig function are bubbled up to the JS side largely as-is: 94 | 95 | ```error.zig 96 | pub fn fail() !void { 97 | return error.Failed; 98 | } 99 | ``` 100 | 101 | ```main.ts 102 | import { fail } from './error.zig' 103 | 104 | export function main() { 105 | try { 106 | fail() 107 | } catch (e) { 108 | console.log(e) 109 | } 110 | } 111 | ``` 112 | 113 | This will show `[Error: Native error] { code: 'Failed' }`. Stack traces will be added soon. 114 | 115 | 116 | ### Structs 117 | 118 | Structs passed to or returned from native modules are automatically converted to/from JS objects for convenience. This can be opted out of by using `*js.Object` instead of the struct type. 119 | 120 | -------------------------------------------------------------------------------- /docs/packages.md: -------------------------------------------------------------------------------- 1 | ## Overview 2 | 3 | A "package" is a collection of code that is meant to be shared. Packages allow you to build and/or deploy software without knowing exactly how other packages are implemented. 4 | 5 | ### `package.json` 6 | 7 | Synapse follows in the footsteps of `npm`, and so it uses `package.json` to determine which directories are packages. [The documentation from `npm`](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) also applies to Synapse except for the following fields: 8 | * `files` * 9 | * `directories` 10 | * `man` 11 | * `config` 12 | * `publishConfig` 13 | * `overrides` * 14 | * `scripts` ** 15 | * `bundleDependencies` 16 | 17 | Legend: 18 | - \* Under consideration 19 | - ** Partial support 20 | 21 | Package `scripts` can be executed with `synapse run` but are otherwise ignored. This includes "install" scripts, which means packages that rely on building from source on install do not work yet. 22 | 23 | ### Publishing 24 | 25 | Publishing packages is currently limited to the following destinations: 26 | 27 | * A local repository via `synapse publish --local` 28 | * An archive using `synapse publish --archive ` 29 | 30 | Support for publishing directly to `npm` is planned in addition to a repository specifically for Synapse. 31 | 32 | ### "Deployed" Packages 33 | 34 | Code produced by Synapse can be shared _after_ running `synapse deploy`.
The shared code is no longer 1:1 with the source code but rather a reduced form of it. 35 | 36 | A good example of this is an SDK for an API. See [this directory](../examples/sdk-and-cli/) for a mini project that creates an SDK + CLI tool. 37 | 38 | ### Best Practices 39 | 40 | It is ***incredibly important*** to understand that "deployed" packages are _not_ always isolated from changes you make to the original package. This is because the package might reference resources that you can change. 41 | 42 | ***If you were to run `synapse destroy`, all consumers of the package might immediately break if you delete referenced resources!*** 43 | 44 | Fortunately, Synapse has safeguards to prevent someone from accidentally doing this. But we still need solutions for stopping more subtle problems. 45 | 46 | #### Environments 47 | 48 | [Environments](./environments.md) are a great way to test changes in isolation. Future improvements will make packages more "environment-aware". 49 | 50 | 57 | 58 | #### Rollbacks (Experimental) 59 | 60 | A deployment can be changed back to its previous configuration through several mechanisms: 61 | 62 | * Manually with `synapse rollback` 63 | * When tests fail via `synapse test --rollback-if-failed` 64 | * When deploying fails via `synapse deploy --rollback-if-failed` 65 | 66 | Rollbacks are best-effort. Their success (or failure) is dependent on which resources were changed. In particular, resources that cause side-effects are less likely to be safely rolled back. 67 | 68 | #### Immutable Deployments (Planned) 69 | 70 | Many resources, particularly stateless ones, do not need to be updated in-place. Instead, each deploy operation can create a new resource with the changes and swap out the references. 71 | 72 | This ensures that existing code behaves _exactly_ the same, bugs and all. The downside is that there are, more often than not, practical limitations to constantly creating new resources. But we believe these can be addressed over time. 73 | 74 | -------------------------------------------------------------------------------- /docs/providers.md: -------------------------------------------------------------------------------- 1 | ## Providers 2 | 3 | [Terraform providers](https://registry.terraform.io/browse/providers) can be used directly with Synapse. "Official" providers can be referenced directly using their name. "Unofficial" providers must be referenced using their namespace e.g. `aliyun/alicloud`. See [this package](../test/fixtures/pm/unofficial-provider/package.json) for an example. 4 | 5 | ### Adding a provider 6 | 7 | Providers can be installed by adding the name to the `synapse` section of your `package.json`: 8 | 9 | ```package.json 10 | { 11 | "synapse": { 12 | "providers": { 13 | "aws": "*" 14 | } 15 | } 16 | } 17 | ``` 18 | 19 | Then run `synapse install`. This will generate a module called `synapse-provider:` that you can use like so: 20 | 21 | ```ts 22 | import * as aws from 'synapse-provider:aws' 23 | 24 | const bucket = new aws.S3Bucket() 25 | ``` 26 | 27 | This will use the default provider configuration.
You can manually specify provider configurations by creating a provider instance: 28 | 29 | ```ts 30 | import { using } from 'synapse:core' 31 | import * as aws from 'synapse-provider:aws' 32 | 33 | const provider = new aws.AwsProvider({ region: 'us-west-2' }) 34 | 35 | const bucket = using(provider, () => { 36 | return new aws.S3Bucket() 37 | }) 38 | ``` 39 | 40 | The `using` function establishes a _context_ in which resources are created. Note that multiple context "types" may exist simultaneously e.g. AWS + Azure. Synapse's `--target` option works exactly the same as the above code; the provider is just created before execution instead of during. 41 | 42 | ### Building integrations 43 | 44 | Provider packages are a key building block for creating integrations. See the [`aws` integration](../integrations/aws) as an example. -------------------------------------------------------------------------------- /examples/sdk-and-cli/README.md: -------------------------------------------------------------------------------- 1 | ## SDK + CLI 2 | 3 | This example shows how you can use Synapse to: 4 | * Configure and deploy backend infrastructure 5 | * Create an "SDK" for that infrastructure 6 | * Build a CLI tool that uses the SDK 7 | 8 | Because our client-side code doesn't use anything specific to `node`, the SDK would work in a browser too! 9 | 10 | ### Project Structure 11 | 12 | We've split up the code into two packages: 13 | * `cli` - Feeds input from the command line into the client from `sdk` 14 | * `sdk` - Creates a bucket + public service to use the bucket. We export a function to create our client. 15 | 16 | Note that all of this code can be placed into a single package instead. This may be preferred for small projects. 17 | 18 | ### Building 19 | 20 | First navigate to `sdk` and run the following: 21 | 22 | ```shell 23 | synapse deploy 24 | synapse publish --local 25 | ``` 26 | 27 | This will deploy the necessary infrastructure + expose our `createClient` function. 28 | 29 | Now navigate to `cli`.
Because `main.ts` doesn't specify any infrastructure, we can immediately try things out: 30 | 31 | ```shell 32 | synapse run -- put foo bar 33 | # Put object foo 34 | synapse run -- get foo 35 | # bar 36 | ``` 37 | 38 | We can also use `synapse build` to create a standalone executable: 39 | 40 | ```shell 41 | synapse build 42 | ./out/bin/cli get foo # or cli.exe on Windows 43 | ``` 44 | 45 | Or if we just want an executable JavaScript bundle: 46 | 47 | ```shell 48 | synapse build --no-sea 49 | ./out/main.js get foo # or `[synapse|node|bun|deno] ./out/main.js get foo` on Windows 50 | ``` 51 | 52 | -------------------------------------------------------------------------------- /examples/sdk-and-cli/cli/main.ts: -------------------------------------------------------------------------------- 1 | import { createClient } from 'sdk' 2 | 3 | const client = createClient() 4 | 5 | export async function main(command: string, key: string, val?: string) { 6 | switch (command) { 7 | case 'get': 8 | const obj = await client.getObject(key) 9 | console.log(obj) 10 | break 11 | 12 | case 'put': 13 | if (val === undefined) { 14 | throw new Error('Expected a value for the "put" command') 15 | } 16 | 17 | await client.putObject(key, val) 18 | console.log('Put object', key) 19 | break 20 | 21 | default: 22 | throw new Error(`Invalid command: ${command}`) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /examples/sdk-and-cli/cli/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cli", 3 | "bin": "./main.ts", 4 | "dependencies": { 5 | "sdk": "spr:#sdk" 6 | } 7 | } -------------------------------------------------------------------------------- /examples/sdk-and-cli/sdk/main.ts: -------------------------------------------------------------------------------- 1 | import { HttpService } from 'synapse:srl/compute' 2 | import { Bucket } from 'synapse:srl/storage' 3 | import { fetch, HttpError } from 'synapse:http' 4 | 5 | const bucket = new Bucket() 6 | const service = new HttpService({ auth: 'none' }) 7 | 8 | const getRoute = service.route('GET', '/{key+}', async req => { 9 | const { key } = req.pathParameters 10 | const data = await bucket.get(key, 'utf-8') 11 | if (data === undefined) { 12 | throw new HttpError(`Key not found: ${key}`, { status: 404 }) 13 | } 14 | 15 | return data 16 | }) 17 | 18 | const putRoute = service.route('PUT', '/{key+}', async (req, body: string) => { 19 | const { key } = req.pathParameters 20 | await bucket.put(key, body) 21 | }) 22 | 23 | export function createClient() { 24 | async function getObject(key: string): Promise { 25 | return fetch(getRoute, key) 26 | } 27 | 28 | async function putObject(key: string, obj: string): Promise { 29 | await fetch(putRoute, key, obj) 30 | } 31 | 32 | return { getObject, putObject } 33 | } 34 | -------------------------------------------------------------------------------- /examples/sdk-and-cli/sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sdk", 3 | "exports": "./main.ts" 4 | } -------------------------------------------------------------------------------- /examples/websites/preact-bucket/app.tsx: -------------------------------------------------------------------------------- 1 | import { Suspense, useRef } from 'preact/compat' 2 | import { Bucket } from 'synapse:srl/storage' 3 | import { createWebsite } from '@cohesible/synapse-preact' 4 | import { useServer, 
openBrowser } from '@cohesible/synapse-websites' 5 | 6 | const website = createWebsite() 7 | const bucket = new Bucket() 8 | 9 | const getData = (key: string) => { 10 | return bucket.get(key, 'utf-8').catch(e => { 11 | return (e as any).message 12 | }) 13 | } 14 | 15 | function BucketContents(props: { bucketKey: string }) { 16 | const data = useServer(getData, props.bucketKey) 17 | 18 | return
{data}
19 | } 20 | 21 | function BucketPage(props: { bucketKey: string }) { 22 | return ( 23 |
24 | loading
}> 25 | 26 | 27 | 28 | ) 29 | } 30 | 31 | const addData = website.bind(async (key: string, data: string) => { 32 | await bucket.put(key, data) 33 | }) 34 | 35 | function BucketForm() { 36 | const keyRef = useRef() 37 | const valueRef = useRef() 38 | 39 | function submit() { 40 | const key = keyRef.current.value 41 | const value = valueRef.current.value 42 | 43 | addData(key, value).then(() => { 44 | window.location = window.location 45 | }) 46 | } 47 | 48 | return ( 49 |
50 | 54 | 58 | 59 |
60 | ) 61 | } 62 | 63 | async function getItems() { 64 | console.log('getting items') 65 | return await bucket.list() 66 | } 67 | 68 | const doDelete = website.bind((key: string) => bucket.delete(key)) 69 | 70 | function BucketItem(props: { bucketKey: string }) { 71 | const k = props.bucketKey 72 | 73 | function deleteItem() { 74 | doDelete(k).then(() => { 75 | window.location = window.location 76 | }) 77 | } 78 | 79 | return ( 80 |
  • 81 |
    82 | {k} 83 | 84 |
    85 |
  • 86 | ) 87 | } 88 | 89 | function ItemList() { 90 | const items = useServer(getItems) 91 | 92 | if (items.length === 0) { 93 | return
    There's nothing in the bucket!
    94 | } 95 | 96 | return ( 97 |
      98 | {items.map(k => )} 99 |
    100 | ) 101 | } 102 | 103 | function HomePage() { 104 | return ( 105 |
    106 | 107 |

    108 | 109 | 110 | 111 |
    112 | ) 113 | } 114 | 115 | website.page('/', HomePage) 116 | website.page('/bucket/{bucketKey}', BucketPage) 117 | 118 | export async function main() { 119 | openBrowser(website.url) 120 | } 121 | 122 | -------------------------------------------------------------------------------- /examples/websites/preact-bucket/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cohesible/synapse-preact": "spr:#synapse-preact" 4 | } 5 | } -------------------------------------------------------------------------------- /examples/websites/preact-bucket/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "jsx": "react-jsx", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | "jsxImportSource": "preact" 8 | }, 9 | "include": ["app.tsx"] 10 | } -------------------------------------------------------------------------------- /examples/websites/react-bucket/app.tsx: -------------------------------------------------------------------------------- 1 | import { Suspense, useRef } from 'react' 2 | import { Bucket } from 'synapse:srl/storage' 3 | import { createWebsite } from '@cohesible/synapse-react' 4 | import { useServer, openBrowser } from '@cohesible/synapse-websites' 5 | 6 | const website = createWebsite() 7 | const bucket = new Bucket() 8 | 9 | const getData = (key: string) => { 10 | return bucket.get(key, 'utf-8').catch(e => { 11 | return (e as any).message 12 | }) 13 | } 14 | 15 | function BucketContents(props: { bucketKey: string }) { 16 | const data = useServer(getData, props.bucketKey) 17 | 18 | return
    {data}
    19 | } 20 | 21 | function BucketPage(props: { bucketKey: string }) { 22 | return ( 23 |
    24 | loading
    }> 25 | 26 | 27 | 28 | ) 29 | } 30 | 31 | const addData = website.bind(async (key: string, data: string) => { 32 | await bucket.put(key, data) 33 | }) 34 | 35 | function BucketForm() { 36 | const keyRef = useRef() 37 | const valueRef = useRef() 38 | 39 | function submit() { 40 | const key = keyRef.current.value 41 | const value = valueRef.current.value 42 | 43 | addData(key, value).then(() => { 44 | window.location = window.location 45 | }) 46 | } 47 | 48 | return ( 49 |
    50 | 54 | 58 | 59 |
    60 | ) 61 | } 62 | 63 | async function getItems() { 64 | return await bucket.list() 65 | } 66 | 67 | const doDelete = website.bind((key: string) => bucket.delete(key)) 68 | 69 | function BucketItem(props: { bucketKey: string }) { 70 | const k = props.bucketKey 71 | 72 | function deleteItem() { 73 | doDelete(k).then(() => { 74 | window.location = window.location 75 | }) 76 | } 77 | 78 | return ( 79 |
  • 80 |
    81 | {k} 82 | 83 |
    84 |
  • 85 | ) 86 | } 87 | 88 | function ItemList() { 89 | const items = useServer(getItems) 90 | 91 | if (items.length === 0) { 92 | return
    There's nothing in the bucket!
    93 | } 94 | 95 | return ( 96 |
      97 | {items.map(k => )} 98 |
    99 | ) 100 | } 101 | 102 | function HomePage() { 103 | return ( 104 |
    105 | 106 |

    107 | 108 | 109 | 110 |
    111 | ) 112 | } 113 | 114 | website.page('/', HomePage) 115 | website.page('/bucket/{bucketKey}', BucketPage) 116 | 117 | export async function main() { 118 | openBrowser(website.url) 119 | } 120 | -------------------------------------------------------------------------------- /examples/websites/react-bucket/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cohesible/synapse-react": "spr:#synapse-react", 4 | "react": "18.3.1" 5 | } 6 | } -------------------------------------------------------------------------------- /examples/websites/react-bucket/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "jsx": "react-jsx", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext" 7 | }, 8 | "include": ["app.tsx"] 9 | } -------------------------------------------------------------------------------- /examples/workflows/README.md: -------------------------------------------------------------------------------- 1 | ## Workflows 2 | 3 | A "workflow" is a kind of durable code execution. You can imagine it as a series of "checkpoints" that you only want to successfully execute once for a given job, regardless of any subsequent failures. 4 | 5 | Synapse has an **experimental** workflows API that is not vended apart of the main CLI. 6 | 7 | The API takes inspiration from [Temporal](https://temporal.io/) but compatibility is a non-goal. 8 | 9 | The biggest difference between Temporal and this API is that the Synapse-based API relies entirely on your infrastructure. There is no hosted service. 10 | 11 | ### Usage 12 | 13 | The API package needs to be built and published on your machine: 14 | 15 | ```shell 16 | (cd sdk && synapse compile && synapse publish --local) 17 | ``` 18 | 19 | See [this simple example](./simple/main.ts) to try it out. 20 | 21 | -------------------------------------------------------------------------------- /examples/workflows/sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "workflows", 3 | "exports": "./main.ts", 4 | "synapse": { 5 | "config": { 6 | "sharedLib": true, 7 | "exposeInternal": true 8 | } 9 | } 10 | } -------------------------------------------------------------------------------- /examples/workflows/simple/main.ts: -------------------------------------------------------------------------------- 1 | import { checkpoint, createWorkflowService } from '@cohesible/workflows' 2 | import { Bucket, Table } from 'synapse:srl/storage' 3 | 4 | const db = new Table() 5 | 6 | // A "checkpoint" is a function you want to execute _once_, to completion. 7 | // 8 | // Checkpoint results are saved so that even if later code fails, we can 9 | // continue where we left off. 10 | const setTimestamp = checkpoint(async (key: string) => { 11 | const timestamp = Date.now() 12 | await db.set(key, timestamp) 13 | 14 | return { timestamp } 15 | }) 16 | 17 | const bucket = new Bucket() 18 | const saveData = checkpoint(async (key: string, data: string) => { 19 | if (data === 'invalid') { 20 | throw new Error('Received invalid data') 21 | } 22 | 23 | await bucket.put(key, data) 24 | }) 25 | 26 | // Checkpoints are only usable inside of other checkpoints and "workflows", which are 27 | // simply root-level checkpoints tied to particular infrastructure. 
28 | // 29 | // We will be running our checkpoints on the infrastructure provided by `createWorkflowService` 30 | const workflows = createWorkflowService() 31 | 32 | // The function passed to `register` becomes a root-level checkpointed workflow. 33 | // A workflow behaves just like any other function except that it respects checkpoints. 34 | // 35 | // Repeated runs of the same workflow job will execute the same code but will skip over 36 | // previously successful checkpoints. 37 | const saveTimeAndData = workflows.register(async (req: { key: string, data: string }) => { 38 | const result = await setTimestamp(req.key) 39 | await saveData(req.key, req.data) 40 | 41 | return result 42 | }) 43 | 44 | export async function main() { 45 | // Checkpoints persist results for a given `workflowJobId`. 46 | // 47 | // This example fails at `saveData` when `data` is equal to 'invalid' 48 | // Try running this example with `synapse run` and note "current time" 49 | 50 | const workflowJobId = 'run-fail' 51 | const data = 'invalid' 52 | 53 | console.log('current time: ', new Date().toISOString()) 54 | console.log() 55 | 56 | try { 57 | const result = await saveTimeAndData.run(workflowJobId, { key: 'foo', data }) 58 | console.log('timestamp: ', new Date(result.timestamp).toISOString()) 59 | } catch (e) { 60 | // Now change 'invalid' to 'not invalid' and run again 61 | // 62 | // You'll see that `timestamp` is _before_ "current time" because we already 63 | // executed that step from our failed attempt. 64 | console.error('Workflow failed:\n', e) 65 | } 66 | } -------------------------------------------------------------------------------- /examples/workflows/simple/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cohesible/workflows": "spr:#workflows" 4 | } 5 | } -------------------------------------------------------------------------------- /integrations/aws/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse-aws", 3 | "dependencies": { 4 | "@aws-sdk/client-cloudfront": "^3.380.0", 5 | "@aws-sdk/client-cloudwatch": "^3.445.0", 6 | "@aws-sdk/client-cloudwatch-logs": "^3.445.0", 7 | "@aws-sdk/client-dynamodb": "^3.379.1", 8 | "@aws-sdk/client-ec2": "^3.379.1", 9 | "@aws-sdk/client-ecr": "^3.379.1", 10 | "@aws-sdk/client-ecr-public": "^3.388.0", 11 | "@aws-sdk/client-ecs": "^3.379.1", 12 | "@aws-sdk/client-eventbridge": "^3.379.1", 13 | "@aws-sdk/client-kinesis": "^3.379.1", 14 | "@aws-sdk/client-lambda": "^3.379.1", 15 | "@aws-sdk/client-organizations": "^3.379.1", 16 | "@aws-sdk/client-s3": "^3.379.1", 17 | "@aws-sdk/client-secrets-manager": "^3.379.1", 18 | "@aws-sdk/client-sqs": "^3.379.1", 19 | "@aws-sdk/s3-request-presigner": "^3.574.0" 20 | }, 21 | "exports": { 22 | ".": "./src/index.ts", 23 | "./permissions": "./src/permissions.ts", 24 | "./*": "./src/services/*.ts" 25 | }, 26 | "types": "./src/index.d.ts", 27 | "synapse": { 28 | "config": { 29 | "sharedLib": true, 30 | "exposeInternal": true 31 | }, 32 | "providers": { 33 | "aws": "*" 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /integrations/aws/src/permissions.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import { Provider } from './index' 3 | 4 | export interface Statement { 5 | Effect: 'Allow' | 'Deny' 6 | Action: string | string[] 7 | Resource: string | 
string[] 8 | Condition?: any 9 | } 10 | 11 | class IamContext { 12 | static readonly [core.contextType] = 'aws-iam' 13 | public readonly statements: Statement[] = [] 14 | } 15 | 16 | type LifecycleStage = 'create' | 'update' | 'read' | 'delete' 17 | 18 | interface ResourceStatement extends ArnProps { 19 | action: string | string[] 20 | deny?: boolean 21 | condition?: any 22 | lifecycle?: LifecycleStage[] 23 | } 24 | 25 | export function addStatement(statement: Statement) { 26 | const ctx = core.maybeGetContext(IamContext) 27 | if (ctx) { 28 | ctx.statements.push(statement) 29 | } 30 | } 31 | 32 | core.stubWhenBundled(addStatement) 33 | 34 | export function addResourceStatement(statement: ResourceStatement, receiver?: any) { 35 | const ctx = core.maybeGetContext(IamContext) 36 | if (ctx) { 37 | const prefixAction = (action: string) => action === '*' || action.includes(':') 38 | ? action 39 | : `${statement.service}:${action}` 40 | 41 | const action = Array.isArray(statement.action) 42 | ? statement.action.map(prefixAction) 43 | : prefixAction(statement.action) 44 | 45 | const boundCtx = receiver ? core.getBoundContext(receiver, Provider) : undefined 46 | 47 | ctx.statements.push({ 48 | Effect: statement.deny ? 'Deny' : 'Allow', 49 | Action: action, 50 | Resource: getArn(statement, boundCtx), 51 | Condition: statement.condition, 52 | }) 53 | } 54 | } 55 | 56 | core.stubWhenBundled(addResourceStatement) 57 | 58 | export function getPermissions(target: any) { 59 | const ctx = new IamContext() 60 | core.using(ctx, () => core.symEval(target)) 61 | 62 | return ctx.statements 63 | } 64 | 65 | core.stubWhenBundled(getPermissions) 66 | 67 | export function getPermissionsLater(target: any, fn: (statements: Statement[]) => void) { 68 | core.defer(() => void fn(getPermissions(target))) 69 | } 70 | 71 | core.stubWhenBundled(getPermissionsLater) 72 | 73 | interface ArnProps { 74 | partition?: string 75 | service: string 76 | region?: string 77 | account?: string 78 | resource?: string 79 | } 80 | 81 | function getArn(props: ArnProps, ctx?: Provider) { 82 | const resource = props.resource?.toString() 83 | if (resource?.startsWith('arn:')) { 84 | return props.resource 85 | } 86 | 87 | ctx ??= core.getContext(Provider) 88 | 89 | return [ 90 | 'arn', 91 | props.partition ?? ctx.partition, 92 | props.service, 93 | props.region ?? ctx.regionId, 94 | props.account ?? ctx.accountId, 95 | resource ?? '*', 96 | ].join(':') 97 | } -------------------------------------------------------------------------------- /integrations/aws/src/services/certificate-manager.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as aws from 'synapse-provider:aws' 3 | import { HostedZone } from './route53' 4 | import { addResourceStatement } from '../permissions' 5 | 6 | export class Certificate { 7 | public readonly resource: aws.AcmCertificate 8 | 9 | // This can be used to force other resources to wait for validation: 10 | // `core.addDependencies(myResource, cert.validation)` 11 | public readonly validation: aws.AcmCertificateValidation 12 | 13 | //private static provider?: InstanceType 14 | public constructor(props: { name?: string, zone: HostedZone }) { 15 | const { name, zone } = props 16 | 17 | // this needs to be in us-east-1 to validate under a domain 18 | const provider = new aws.AwsProvider({ region: 'us-east-1' }) 19 | const domainName = name ? 
`${name}.${zone.resource.name}` : zone.name 20 | 21 | const { cert, validation } = core.using(provider, () => { 22 | const cert = new aws.AcmCertificate({ 23 | domainName, 24 | validationMethod: 'DNS', 25 | }) 26 | 27 | // TODO: get count??? 28 | const dvo = cert.domainValidationOptions[0] 29 | const validationRecord = new aws.Route53Record({ 30 | name: dvo.resourceRecordName, 31 | zoneId: zone.resource.zoneId, 32 | allowOverwrite: true, 33 | type: dvo.resourceRecordType, 34 | records: [dvo.resourceRecordValue], 35 | ttl: 60, 36 | }) 37 | 38 | const validation = new aws.AcmCertificateValidation({ 39 | certificateArn: cert.arn, 40 | validationRecordFqdns: [validationRecord.fqdn], 41 | }) 42 | 43 | return { cert, validation } 44 | }) 45 | 46 | this.resource = cert 47 | this.validation = validation 48 | } 49 | } 50 | 51 | // https://docs.aws.amazon.com/acm/latest/userguide/authen-apipermissions.html 52 | core.bindConstructorModel(aws.AcmCertificate, function () { 53 | const addCertStatement = (action: string | string[], lifecycle: 'create' | 'update' | 'delete' | 'read', resource = '*') => { 54 | addResourceStatement({ 55 | service: 'acm', 56 | action, 57 | lifecycle: [lifecycle], 58 | resource: `certificate/${resource}` 59 | }, this) 60 | } 61 | 62 | addCertStatement('RequestCertificate', 'create') 63 | addCertStatement('UpdateCertificateOptions', 'update', this.id) 64 | addCertStatement('DeleteCertificate', 'delete', this.id) 65 | addCertStatement(['GetCertificate', 'DescribeCertificate'], 'read', this.id) 66 | }) 67 | -------------------------------------------------------------------------------- /integrations/aws/src/services/clients.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import { Provider, getManagementRoleArn } from '..' 3 | import { RegionInputConfig } from '@smithy/config-resolver' 4 | import { AwsAuthInputConfig } from '@aws-sdk/middleware-signing' 5 | import { assumeRole } from './iam' 6 | import { Account } from './organizations' 7 | import { addStatement } from '../permissions' 8 | 9 | function createCredentialsProvider(roleArn: string) { 10 | return async function getCredentials() { 11 | const resp = await assumeRole(roleArn) 12 | 13 | return { 14 | accessKeyId: resp.Credentials!.AccessKeyId!, 15 | secretAccessKey: resp.Credentials!.SecretAccessKey!, 16 | sessionToken: resp.Credentials!.SessionToken, 17 | expiration: resp.Credentials!.Expiration, 18 | } 19 | } 20 | } 21 | 22 | type ClientConfig = RegionInputConfig & AwsAuthInputConfig 23 | export function createClient(Client: new (config: ClientConfig) => T, opt?: { roleArn?: string }): T { 24 | const ctx = core.getContext(Provider) 25 | const roleArn = opt?.roleArn ?? ctx.roleArn 26 | const config: ClientConfig = { 27 | region: ctx.regionId, 28 | credentials: roleArn ? 
createCredentialsProvider(roleArn) : undefined, 29 | } 30 | 31 | return new Client(config) 32 | } 33 | 34 | export function createCrossAccountClient(account: Account, Client: new (config: ClientConfig) => T): T { 35 | const roleArn = getManagementRoleArn(account) 36 | 37 | core.bindModel( 38 | Client, 39 | Object.fromEntries( 40 | Object.keys(Client.prototype).map(k => [k, function () { 41 | addStatement({ Effect: 'Allow', Action: 'sts:AssumeRole', Resource: roleArn }) 42 | }] as const) 43 | ) as any 44 | ) 45 | 46 | return core.using( 47 | Provider.fromAccount(account), 48 | () => createClient(Client, { roleArn }) 49 | ) 50 | } -------------------------------------------------------------------------------- /integrations/aws/src/services/cloudwatch-logs.ts: -------------------------------------------------------------------------------- 1 | import * as CloudWatchLogs from '@aws-sdk/client-cloudwatch-logs' 2 | 3 | // ResourceNotFoundException -> returns 400 but should be 404 4 | 5 | export async function listLogStreams(group: string, limit?: number, region?: string) { 6 | const client = new CloudWatchLogs.CloudWatchLogs({ region }) 7 | const resp = await client.describeLogStreams({ 8 | logGroupName: group, 9 | orderBy: 'LastEventTime', 10 | limit, 11 | descending: true, 12 | }) 13 | 14 | return resp.logStreams ?? [] 15 | } 16 | 17 | 18 | export async function getLogEvents(group: string, stream: string, region?: string) { 19 | const client = new CloudWatchLogs.CloudWatchLogs({ region }) 20 | const resp = await client.getLogEvents({ 21 | logGroupName: group, 22 | logStreamName: stream, 23 | startFromHead: true, 24 | }) 25 | 26 | return resp.events ?? [] 27 | } 28 | -------------------------------------------------------------------------------- /integrations/aws/src/services/event-bridge.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as EventBridge from '@aws-sdk/client-eventbridge' 3 | import * as aws from 'synapse-provider:aws' 4 | import * as compute from 'synapse:srl/compute' 5 | import { LambdaFunction } from './lambda' 6 | import { Role, spPolicy } from './iam' 7 | 8 | export class ScheduledExecution { 9 | private readonly client = new EventBridge.EventBridge({}) 10 | 11 | public constructor() { } 12 | } 13 | 14 | export class Schedule { 15 | private readonly resource: aws.SchedulerSchedule 16 | 17 | public constructor(expression: string, fn: () => void) { 18 | const lambda = new LambdaFunction(fn) 19 | const role = new Role({ 20 | assumeRolePolicy: JSON.stringify(spPolicy("scheduler.amazonaws.com")), 21 | inlinePolicy: [ 22 | { 23 | name: 'SchedulerInvokePolicy', 24 | policy: JSON.stringify({ 25 | Version: "2012-10-17", 26 | Statement: [{ 27 | Action: ['lambda:InvokeFunction'], 28 | Resource: [lambda.resource.arn], 29 | Effect: 'Allow', 30 | }], 31 | }) 32 | } 33 | ] 34 | }) 35 | 36 | this.resource = new aws.SchedulerSchedule({ 37 | scheduleExpression: expression, 38 | target: { 39 | arn: lambda.resource.arn, 40 | roleArn: role.resource.arn, 41 | }, 42 | flexibleTimeWindow: { 43 | mode: 'OFF' 44 | } 45 | }) 46 | } 47 | 48 | 49 | public on(event: 'tick', fn: () => void): void { 50 | throw new Error('TODO') 51 | } 52 | } 53 | 54 | core.addTarget(compute.Schedule, Schedule, 'aws') 55 | 56 | -------------------------------------------------------------------------------- /integrations/aws/src/services/organizations.ts: -------------------------------------------------------------------------------- 
1 | import * as aws from 'synapse-provider:aws' 2 | import * as core from 'synapse:core' 3 | import * as Organizations from '@aws-sdk/client-organizations' 4 | 5 | export class Organization { 6 | public get id() { 7 | return this.resource.id 8 | } 9 | 10 | // https://docs.aws.amazon.com/organizations/latest/userguide/orgs_getting-started_concepts.html 11 | // "Currently, you can have only one root. AWS Organizations automatically creates it for you when you create an organization." 12 | public get root() { 13 | return this.resource.roots[0] 14 | } 15 | 16 | public readonly resource: aws.OrganizationsOrganization 17 | public constructor() { 18 | this.resource = new aws.OrganizationsOrganization({ 19 | featureSet: 'ALL' 20 | }) 21 | } 22 | 23 | public static import(id: string) { 24 | throw new Error('TODO') 25 | } 26 | } 27 | 28 | function getParentId(parent: Organization | OrganizationalUnit) { 29 | if (parent instanceof Organization) { 30 | return parent.root.id 31 | } 32 | 33 | return parent.id 34 | } 35 | 36 | interface OrganizationalUnitProps { 37 | readonly name: string 38 | readonly parent: Organization | OrganizationalUnit 39 | } 40 | 41 | export class OrganizationalUnit { 42 | public get id() { 43 | return this.resource.id 44 | } 45 | 46 | public readonly resource: aws.OrganizationsOrganizationalUnit 47 | public constructor(props: OrganizationalUnitProps) { 48 | this.resource = new aws.OrganizationsOrganizationalUnit({ 49 | name: props.name, 50 | parentId: getParentId(props.parent) 51 | }) 52 | } 53 | } 54 | 55 | interface AccountProps { 56 | readonly name: string 57 | readonly email: string 58 | readonly parent?: Organization | OrganizationalUnit 59 | readonly closeOnDeletion?: boolean 60 | 61 | /** @internal */ 62 | readonly id?: string 63 | } 64 | 65 | export class Account { 66 | private readonly resource?: aws.OrganizationsAccount 67 | public readonly id: string 68 | public readonly managementRoleName: string 69 | 70 | public constructor(props: AccountProps) { 71 | if (!props.id) { 72 | this.resource = new aws.OrganizationsAccount({ 73 | name: props.name, 74 | email: props.email, 75 | closeOnDeletion: props.closeOnDeletion, 76 | iamUserAccessToBilling: 'ALLOW', // 'DENY' 77 | parentId: props.parent ? getParentId(props.parent) : undefined, 78 | }) 79 | this.id = this.resource.id 80 | } else { 81 | this.id = props.id 82 | } 83 | 84 | this.managementRoleName = 'OrganizationAccountAccessRole' 85 | } 86 | 87 | public static import(id: string) { 88 | const account = importAccount({ id }) 89 | 90 | return new Account({ 91 | id: account.id!, 92 | name: account.name!, 93 | email: account.email!, 94 | }) 95 | } 96 | } 97 | 98 | type UncapitalizeObject<T> = { [P in Uncapitalize<keyof T & string>]: T[Capitalize<P> & keyof T] } 99 | 100 | function uncapitalize<T extends object>(o: T): UncapitalizeObject<T> { 101 | return Object.fromEntries( 102 | Object.entries(o).map(([k, v]) => [k[0].toLowerCase().concat(k.slice(1)), v]) 103 | ) as any 104 | } 105 | 106 | const importAccount = core.defineDataSource(async (identity: { id: string }) => { 107 | const client = new Organizations.Organizations({}) 108 | const resp = await client.describeAccount({ AccountId: identity.id }) 109 | 110 | return uncapitalize(resp.Account!) 111 | }) -------------------------------------------------------------------------------- /integrations/aws/src/services/route53.ts: --------------------------------------------------------------------------------
1 | import * as assert from 'assert' 2 | import * as core from 'synapse:core' 3 | import * as aws from 'synapse-provider:aws' 4 | import * as net from 'synapse:srl/net' 5 | 6 | export class HostedZone implements net.HostedZone { 7 | public constructor( 8 | public readonly name: string, 9 | public readonly resource = new aws.Route53Zone({ name }) 10 | ) { 11 | } 12 | 13 | public createSubzone(name: string): net.HostedZone { 14 | const qualifiedName = `${name}.${this.name}` 15 | const subdomain = new HostedZone(qualifiedName) 16 | 17 | new aws.Route53Record({ 18 | name: qualifiedName, 19 | ttl: 30, 20 | type: 'NS', 21 | zoneId: this.resource.zoneId, 22 | records: subdomain.resource.nameServers, 23 | }) 24 | 25 | return subdomain 26 | } 27 | 28 | public createSubdomain(name: string): net.HostedZone { 29 | const qualifiedName = `${name}.${this.name}` 30 | 31 | return new HostedZone(qualifiedName, this.resource) 32 | } 33 | 34 | public createRecord(record: net.ResourceRecord) { 35 | new aws.Route53Record({ 36 | name: record.name, 37 | ttl: record.ttl, 38 | type: record.type, 39 | zoneId: this.resource.zoneId, 40 | records: [record.value], 41 | }) 42 | } 43 | } 44 | 45 | core.addTarget(net.HostedZone, HostedZone, 'aws') 46 | -------------------------------------------------------------------------------- /integrations/aws/src/services/secrets-manager.ts: --------------------------------------------------------------------------------
1 | import * as core from 'synapse:core' 2 | import * as SecretsManager from '@aws-sdk/client-secrets-manager' 3 | import * as aws from 'synapse-provider:aws' 4 | import * as storage from 'synapse:srl/storage' 5 | 6 | export class Secret<T = any> { 7 | private readonly client = new SecretsManager.SecretsManager({}) 8 | public readonly resource: aws.SecretsmanagerSecret 9 | 10 | public constructor(envVar?: string) { 11 | this.resource = new aws.SecretsmanagerSecret({}) 12 | if (envVar) { 13 | new SecretUploader(this, envVar) 14 | } 15 | } 16 | 17 | public async get(): Promise<T> { 18 | const resp = await this.client.getSecretValue({ 19 | SecretId: this.resource.id, 20 | }) 21 | 22 | if (!resp.SecretString) { 23 | throw new Error('secret value must be a string') 24 | } 25 | 26 | return JSON.parse(resp.SecretString) 27 | } 28 | 29 | public async put(val: T): Promise<{ version: string }> { 30 | const resp = await this.client.putSecretValue({ 31 | SecretId: this.resource.id, 32 | SecretString: JSON.stringify(val), 33 | }) 34 | 35 | return { version: resp.VersionId!
} 36 | } 37 | } 38 | 39 | const SecretUploader = core.defineResource({ 40 | read: async (state: { secret: Secret, version: string }) => state, 41 | create: async (secret: Secret, envVar: string) => { 42 | const val = process.env[envVar] 43 | if (!val) { 44 | throw new Error(`Environment variable is not set: ${envVar}`) 45 | } 46 | const resp = await secret.put(envVar) 47 | 48 | return { secret, ...resp } 49 | }, 50 | 51 | delete: async (state) => void await state.secret.put('') 52 | }) 53 | 54 | core.addTarget(storage.Secret, Secret, 'aws') 55 | // core.bindModel(SecretsManager.SecretsManager, { 56 | // 'getSecretValue': { 57 | // 'Effect': 'Allow', 58 | // 'Action': 'secretsmanager:GetSecretValue', 59 | // 'Resource': 'arn:{context.Partition}:secretsmanager:{context.Region}:{context.Account}:secret:{0.SecretId}' 60 | // }, 61 | // 'createSecret': { 62 | // 'Effect': 'Allow', 63 | // 'Action': 'secretsmanager:CreateSecret', 64 | // 'Resource': '*' 65 | // }, 66 | // 'deleteSecret': { 67 | // 'Effect': 'Allow', 68 | // 'Action': 'secretsmanager:DeleteSecret', 69 | // 'Resource': 'arn:{context.Partition}:secretsmanager:{context.Region}:{context.Account}:secret:{0.SecretId}' 70 | // }, 71 | // 'updateSecret': { 72 | // 'Effect': 'Allow', 73 | // 'Action': 'secretsmanager:UpdateSecret', 74 | // 'Resource': 'arn:{context.Partition}:secretsmanager:{context.Region}:{context.Account}:secret:{0.SecretId}' 75 | // }, 76 | // 'listSecrets': { 77 | // 'Effect': 'Allow', 78 | // 'Action': 'secretsmanager:ListSecrets', 79 | // 'Resource': '*' 80 | // }, 81 | // 'putSecretValue': { 82 | // 'Effect': 'Allow', 83 | // 'Action': 'secretsmanager:PutSecretValue', 84 | // 'Resource': 'arn:{context.Partition}:secretsmanager:{context.Region}:{context.Account}:secret:{0.SecretId}' 85 | // } 86 | // }) 87 | -------------------------------------------------------------------------------- /integrations/aws/src/sigv4.ts: -------------------------------------------------------------------------------- 1 | import { SignatureV4 } from '@smithy/signature-v4' 2 | import { Hash } from '@smithy/hash-node' 3 | import { HttpRequest } from "@smithy/protocol-http" 4 | import { defaultProvider } from '@aws-sdk/credential-provider-node' 5 | 6 | export interface Credentials { 7 | readonly accessKeyId: string 8 | readonly secretAccessKey: string 9 | readonly sessionToken?: string 10 | } 11 | 12 | interface SignerContext { 13 | readonly region: string 14 | readonly service: string 15 | readonly credentials?: Credentials 16 | } 17 | 18 | export async function signRequest(ctx: SignerContext, request: HttpRequest) { 19 | const signer = new SignatureV4({ 20 | ...ctx, 21 | credentials: defaultProvider(), 22 | sha256: Hash.bind(null, 'sha256'), 23 | }) 24 | 25 | return signer.sign(request) 26 | } 27 | 28 | -------------------------------------------------------------------------------- /integrations/aws/src/utils.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs' 2 | import * as path from 'node:path' 3 | import * as crypto from 'node:crypto' 4 | 5 | export function* listFiles(directory: string): Generator { 6 | const files = fs.readdirSync(directory, { withFileTypes: true }) 7 | for (const f of files) { 8 | const p = path.join(directory, f.name) 9 | if (f.isFile()) { 10 | yield p 11 | } else if (f.isDirectory()) { 12 | yield* listFiles(p) 13 | } 14 | } 15 | } 16 | 17 | export function getFileHash(p: string) { 18 | const data = fs.readFileSync(p, 'utf-8') 19 | 20 | return 
crypto.createHash('sha512').update(data).digest('hex') 21 | } 22 | 23 | export function getDataHash(data: string | Buffer) { 24 | return crypto.createHash('sha512').update(data).digest('hex') 25 | } 26 | -------------------------------------------------------------------------------- /integrations/aws/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist", 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | }, 8 | "include": ["src"] 9 | } 10 | -------------------------------------------------------------------------------- /integrations/local/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse-local", 3 | "exports": { 4 | ".": "./src/index.ts" 5 | }, 6 | "synapse": { 7 | "config": { 8 | "sharedLib": true, 9 | "exposeInternal": true 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /integrations/local/src/cdn.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as storage from 'synapse:srl/storage' 3 | import * as compute from 'synapse:srl/compute' 4 | 5 | export class CDN implements storage.CDN { 6 | public readonly url: string 7 | 8 | constructor(props: { bucket: storage.Bucket }) { 9 | const service = new compute.HttpService({ auth: 'none' }) 10 | service.route('GET', '/{key+}', async req => { 11 | const key = req.pathParameters.key 12 | const [data, metadata] = await Promise.all([ 13 | props.bucket.get(key), 14 | props.bucket.stat(key) 15 | ]) 16 | 17 | const contentType = metadata?.contentType ?? 'application/octet-stream' 18 | 19 | return new Response(data, { 20 | headers: { 21 | 'content-type': contentType 22 | } 23 | }) 24 | }) 25 | 26 | this.url = service.invokeUrl 27 | } 28 | 29 | addOrigin(origin: storage.OriginOptions): void { 30 | 31 | } 32 | } 33 | 34 | core.addTarget(storage.CDN, CDN, 'local') 35 | -------------------------------------------------------------------------------- /integrations/local/src/counter.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as storage from 'synapse:srl/storage' 3 | import * as compute from 'synapse:srl/compute' 4 | import { randomUUID } from 'node:crypto' 5 | 6 | export class Counter implements storage.Counter { 7 | private readonly bucket = new storage.Bucket() 8 | 9 | constructor(private readonly init: number = 0) { 10 | 11 | } 12 | 13 | async get(): Promise { 14 | const data = await this.bucket.get('counter', 'utf-8') 15 | 16 | return data !== undefined ? 
JSON.parse(data) : this.init 17 | } 18 | 19 | async set(amount: number): Promise { 20 | const currentVal = await this.get() 21 | await this.bucket.put('counter', JSON.stringify(amount)) 22 | 23 | return currentVal 24 | } 25 | 26 | async inc(amount = 1): Promise { 27 | const currentVal = await this.get() 28 | const newVal = currentVal + amount 29 | await this.set(newVal) 30 | 31 | return newVal 32 | } 33 | } 34 | 35 | core.addTarget(storage.Counter, Counter, 'local') 36 | 37 | export class SimpleLock { 38 | private readonly bucket = new storage.Bucket() 39 | 40 | async lock(id: string, timeout?: number) { 41 | let sleepTime = 1 42 | 43 | while (true) { 44 | const l = await this.tryLock(id) 45 | if (l) { 46 | return l 47 | } 48 | 49 | await new Promise(r => setTimeout(r, sleepTime)) 50 | 51 | if (sleepTime < 100) { 52 | sleepTime = Math.round((1 + Math.random()) * sleepTime) 53 | } 54 | } 55 | } 56 | 57 | async tryLock(id: string) { 58 | const currentState = await this.bucket.get(id, 'utf-8') 59 | if (currentState && currentState !== '0') { 60 | return 61 | } 62 | 63 | const lockId = randomUUID() 64 | await this.bucket.put(id, lockId) 65 | const c = await this.bucket.get(id, 'utf-8') 66 | 67 | if (c === lockId) { 68 | return { 69 | id, 70 | [Symbol.asyncDispose]: () => this.unlock(id), 71 | } 72 | } 73 | } 74 | 75 | async unlock(id: string) { 76 | await this.bucket.put(id, '0') 77 | } 78 | 79 | async clear() { 80 | for (const k of await this.bucket.list()) { 81 | await this.bucket.delete(k) 82 | } 83 | } 84 | } 85 | 86 | core.addTarget(compute.SimpleLock, SimpleLock, 'local') 87 | -------------------------------------------------------------------------------- /integrations/local/src/function.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as compute from 'synapse:srl/compute' 3 | import * as lib from 'synapse:lib' 4 | 5 | // TODO: could be optimized 6 | function wrapWithTimeout(fn: (...args: any[]) => any, seconds: number): typeof fn { 7 | return (...args) => { 8 | const timeout = new Promise((_, reject) => { 9 | setTimeout(() => reject(new Error('Timed out')), seconds * 1000).unref() 10 | }) 11 | 12 | return Promise.race([fn(...args), timeout]) 13 | } 14 | } 15 | 16 | export class LocalFunction implements compute.Function { 17 | private readonly dest: string 18 | 19 | constructor(target: (...args: T) => Promise | U, opt?: compute.FunctionOptions) { 20 | if (opt?.timeout) { 21 | if (opt.timeout <= 0) { 22 | throw new Error(`A Function's timeout must be greater than 0`) 23 | } 24 | target = wrapWithTimeout(target, opt.timeout) 25 | } 26 | 27 | const bundle = new lib.Bundle(target, { external: opt?.external }) 28 | this.dest = bundle.destination 29 | } 30 | 31 | public async invoke(...args: T): Promise { 32 | const fn = require(this.dest).default 33 | 34 | return fn(...args) 35 | } 36 | 37 | public async invokeAsync(...args: T): Promise { 38 | const fn = require(this.dest).default 39 | fn(...args) 40 | } 41 | } 42 | 43 | export interface LocalFunction { 44 | (...args: T): Promise 45 | } 46 | 47 | core.addTarget(compute.Function, LocalFunction, 'local') 48 | -------------------------------------------------------------------------------- /integrations/local/src/index.ts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cohesible/synapse/45e07b9e05db839317e74931f165370db6ae9b9c/integrations/local/src/index.ts 
-------------------------------------------------------------------------------- /integrations/local/src/provider.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as srl from 'synapse:srl' 3 | import * as path from 'node:path' 4 | import { homedir } from 'node:os' 5 | 6 | // TODO: add a process manager 7 | 8 | export class Provider implements srl.Provider { 9 | static readonly [core.contextType] = 'local' 10 | } 11 | 12 | export const getLocalPath = core.defineDataSource((suffix: string) => { 13 | const synapseDir = process.env['SYNAPSE_INSTALL'] || path.resolve(homedir(), '.synapse') 14 | 15 | return path.resolve(synapseDir, 'local', suffix) 16 | }) 17 | 18 | core.addTarget(srl.Provider, Provider, 'local') 19 | -------------------------------------------------------------------------------- /integrations/local/src/queue.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as storage from 'synapse:srl/storage' 3 | import * as lib from 'synapse:lib' 4 | 5 | interface State { 6 | buffer: T[] 7 | listener?: string 8 | } 9 | 10 | async function getState(bucket: storage.Bucket): Promise> { 11 | const data = await bucket.get('state', 'utf-8') 12 | 13 | return data !== undefined ? JSON.parse(data) : {} 14 | } 15 | 16 | class QueueListener extends core.defineResource({ 17 | create: async (bucket: storage.Bucket, pointer: string) => { 18 | const state = await getState(bucket) 19 | if (state.listener) { 20 | throw new Error(`A queue event listener already exists`) 21 | } 22 | 23 | state.listener = pointer 24 | await bucket.put('state', Buffer.from(JSON.stringify(state))) 25 | }, 26 | update: async (_, bucket, pointer) => { 27 | const state = await getState(bucket) 28 | state.listener = pointer 29 | await bucket.put('state', Buffer.from(JSON.stringify(state))) 30 | }, 31 | delete: async (_, bucket) => { 32 | const state = await getState(bucket) 33 | state.listener = undefined 34 | await bucket.put('state', Buffer.from(JSON.stringify(state))) 35 | }, 36 | }) {} 37 | 38 | export class Queue implements storage.Queue { 39 | private readonly bucket = new storage.Bucket() 40 | 41 | constructor() {} 42 | private async setState(state: State): Promise { 43 | await this.bucket.put('state', Buffer.from(JSON.stringify(state))) 44 | } 45 | 46 | async send(val: T) { 47 | const state = await getState(this.bucket) 48 | if (state.listener) { 49 | const fn = require(state.listener).default 50 | 51 | return fn(val) 52 | } 53 | 54 | state.buffer.push(val) 55 | await this.setState(state) 56 | } 57 | 58 | async consume(fn: (val: T) => U | Promise): Promise { 59 | const state = await getState(this.bucket) 60 | if (state.buffer.length === 0) { 61 | throw new Error(`No events to consume`) 62 | } 63 | 64 | const ev = state.buffer.shift()! 
65 | const res = await fn(ev) 66 | await this.setState(state) // only update the state if the cb succeeds 67 | 68 | return res 69 | } 70 | 71 | on(event: 'message', listener: (ev: T) => Promise | void) { 72 | const bundle = new lib.Bundle(listener) 73 | const resolved = lib.resolveArtifact(bundle.destination as any, bundle.extname) 74 | new QueueListener(this.bucket, resolved.filePath) 75 | } 76 | } 77 | 78 | 79 | core.addTarget(storage.Queue, Queue, 'local') 80 | -------------------------------------------------------------------------------- /integrations/local/src/table.ts: -------------------------------------------------------------------------------- 1 | import * as core from 'synapse:core' 2 | import * as storage from 'synapse:srl/storage' 3 | import { LocalKVStore } from './bucket' 4 | import { createHash } from 'node:crypto' 5 | 6 | function hashObj(o: any) { 7 | return createHash('sha256').update(JSON.stringify(o)).digest('hex') 8 | } 9 | 10 | export class Table implements storage.Table { 11 | private readonly resource = new LocalKVStore() 12 | 13 | public async get(key: K): Promise { 14 | const d = await this.resource.get(hashObj(key), 'utf-8') 15 | 16 | return d !== undefined ? JSON.parse(d) : undefined 17 | } 18 | 19 | getBatch(keys: K[]): Promise<{ key: K; value: V }[]> { 20 | return Promise.all(keys.map(key => this.get(key).then(value => ({ key, value: value! })))) 21 | } 22 | 23 | public async set(key: K, val: V): Promise { 24 | await this.resource.put(hashObj(key), Buffer.from(JSON.stringify(val), 'utf-8')) 25 | } 26 | 27 | async setBatch(items: { key: K; value: V }[]): Promise { 28 | await Promise.all(items.map(item => this.set(item.key, item.value))) 29 | } 30 | 31 | public async delete(key: K): Promise { 32 | await this.resource.delete(hashObj(key)) 33 | } 34 | 35 | async *values(): AsyncIterable { 36 | const keys = await this.resource.list() 37 | // FIXME: technically a file can get deleted while enumerating 38 | const values = await Promise.all(keys.map(k => this.resource.get(k).then(b => JSON.parse(b!.toString())))) 39 | yield values 40 | } 41 | 42 | async clear() { 43 | await this.resource.clear() 44 | } 45 | } 46 | 47 | core.addTarget(storage.Table, Table, 'local') 48 | 49 | // TODO: make this actually use `ttl` 50 | export class TTLCache implements storage.TTLCache { 51 | private readonly resource = new Table() 52 | 53 | public constructor(private readonly ttl: number = 3600) {} 54 | 55 | public async get(key: K): Promise { 56 | return this.resource.get(key) 57 | } 58 | 59 | public async put(key: K, value: V): Promise { 60 | await this.resource.set(key, value) 61 | } 62 | 63 | keys(): Promise { 64 | throw new Error('Method not implemented.') 65 | } 66 | 67 | public async delete(key: K): Promise { 68 | await this.resource.delete(key) 69 | } 70 | } 71 | 72 | core.addTarget(storage.TTLCache, TTLCache, 'local') 73 | -------------------------------------------------------------------------------- /integrations/local/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist", 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | "strict": true 8 | }, 9 | "include": ["src"] 10 | } 11 | -------------------------------------------------------------------------------- /integrations/preact/main.ts: -------------------------------------------------------------------------------- 1 | import * as preact from 'preact' 2 | import { render, 
renderToStringAsync } from 'preact-render-to-string' 3 | import type { HostedZone } from 'synapse:srl/net' 4 | import { JSXRuntime, createWebsiteHost, WebsiteHost } from '@cohesible/synapse-websites' 5 | 6 | const runtime: JSXRuntime = { 7 | createElement: preact.createElement, 8 | render: renderToStringAsync, 9 | mount: (container, children, opt) => { 10 | if (opt.rehydrate) { 11 | preact.hydrate(children, container) 12 | } else { 13 | preact.render(children, container) 14 | } 15 | 16 | return {} 17 | }, 18 | } 19 | 20 | export function createWebsite(props?: { domain?: HostedZone }): WebsiteHost { 21 | return createWebsiteHost(runtime, props) 22 | } -------------------------------------------------------------------------------- /integrations/preact/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse-preact", 3 | "exports": "./main.ts", 4 | "dependencies": { 5 | "preact": "^10.22.0", 6 | "preact-render-to-string": "^6.5.5", 7 | "@cohesible/synapse-websites": "spr:#synapse-websites" 8 | }, 9 | "synapse": { 10 | "config": { 11 | "sharedLib": true 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /integrations/preact/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist", 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | }, 8 | "include": ["main.ts"] 9 | } 10 | -------------------------------------------------------------------------------- /integrations/react/main.ts: -------------------------------------------------------------------------------- 1 | import * as stream from 'node:stream' 2 | import type { HostedZone } from 'synapse:srl/net' 3 | import { createRoot, hydrateRoot } from 'react-dom/client' 4 | import { renderToPipeableStream, renderToString } from 'react-dom/server' 5 | import { JSXRuntime, createWebsiteHost, WebsiteHost } from '@cohesible/synapse-websites' 6 | import { createElement } from 'react' 7 | 8 | const runtime: JSXRuntime = { 9 | createElement, 10 | mount: (container, children, opt) => { 11 | if (opt.rehydrate) { 12 | return hydrateRoot(container, children) 13 | } 14 | 15 | const root = createRoot(container as any) 16 | root.render(children) 17 | 18 | return root 19 | }, 20 | render: node => renderToString(node as React.ReactElement), 21 | renderStream: (node, opt) => { 22 | return new Promise((resolve, reject) => { 23 | const s = renderToPipeableStream(node, { 24 | ...opt, 25 | onShellError: reject, 26 | onShellReady: () => { 27 | const pass = new stream.PassThrough() // FIXME: why is this needed 28 | 29 | resolve(stream.Readable.toWeb(s.pipe(pass)) as any) 30 | }, 31 | }) 32 | }) 33 | }, 34 | } 35 | 36 | export function createWebsite(props?: { domain?: HostedZone }): WebsiteHost { 37 | return createWebsiteHost(runtime, props) 38 | } 39 | -------------------------------------------------------------------------------- /integrations/react/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse-react", 3 | "exports": "./main.ts", 4 | "dependencies": { 5 | "react": "^18.2.0", 6 | "react-dom": "^18.2.0", 7 | "@types/react": "^18.2.37", 8 | "@types/react-dom": "^18.2.15", 9 | "@cohesible/synapse-websites": "spr:#synapse-websites" 10 | }, 11 | "synapse": { 12 | "config": { 13 | "sharedLib": true 14 | } 15 | } 16 | } 
-------------------------------------------------------------------------------- /integrations/react/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist", 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | }, 8 | "include": ["main.ts"] 9 | } 10 | -------------------------------------------------------------------------------- /integrations/websites/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse-websites", 3 | "dependencies": {}, 4 | "exports": { 5 | ".": "./src/host.ts" 6 | }, 7 | "synapse": { 8 | "config": { 9 | "sharedLib": true, 10 | "exposeInternal": true 11 | } 12 | } 13 | } -------------------------------------------------------------------------------- /integrations/websites/src/runtime.ts: -------------------------------------------------------------------------------- 1 | interface JSXElement< 2 | P = any, 3 | T extends string | JSXElementConstructor

<any> = string | JSXElementConstructor<any> 4 | > { 5 | type: T 6 | props: P 7 | key: string | null 8 | } 9 | 10 | type JSXNode<P = any> = JSXElement<P> | Iterable<JSXNode<P>> 11 | 12 | type JSXElementConstructor<P = any> = (props?: P) => JSXNode<P> 13 | 14 | interface Component { 15 | render(): JSXElement 16 | } 17 | 18 | type FunctionComponent<P = any> = (props: P) => JSXNode<P> 19 | export type ComponentType<P = any> = (new (props: P) => Component) | FunctionComponent<P> 20 | 21 | const classSymbol = Symbol('__class__') 22 | function isClass<P>(target: ComponentType<P>): target is new (props: P) => Component { 23 | const v = (target as any)[classSymbol] 24 | if (v !== undefined) { 25 | return v 26 | } 27 | 28 | const props = Object.getOwnPropertyNames(target) 29 | const isFunction = props.includes('arguments') || !props.includes('prototype') 30 | 31 | return (target as any)[classSymbol] = !isFunction 32 | } 33 | 34 | export function instantiate<P>(component: ComponentType<P>

    , props: P) { 35 | if (isClass(component)) { 36 | return new component(props) 37 | } 38 | 39 | return component(props) 40 | } 41 | 42 | interface MountedNode { 43 | unmount?(): void 44 | } 45 | 46 | interface MountOptions { 47 | rehydrate?: boolean 48 | } 49 | 50 | // TODO: combine 'render' functions into one 51 | // we should decide what to do based off the return value 52 | type MountFn = (container: Element | Document, children: T, opt: MountOptions) => MountedNode | void 53 | type RenderFn = (node: T) => Promise | string 54 | type RenderStreamFn = (node: T, opt?: { bootstrapScripts?: string[]} ) => Promise 55 | type CreateElement = (type: string | JSXElementConstructor, props?: any, ...children: JSXElement[]) => JSXElement 56 | 57 | export interface JSXContext { 58 | readonly Provider: ComponentType<{ value: T; children: U }> 59 | readonly Consumer: ComponentType<{ children: (value: T) => U }> 60 | } 61 | 62 | export interface JSXRuntime> { 63 | readonly createElement: CreateElement 64 | readonly mount: MountFn 65 | readonly render: RenderFn 66 | // readonly createContext: (value: any) => U 67 | readonly renderStream?: RenderStreamFn 68 | } 69 | -------------------------------------------------------------------------------- /integrations/websites/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist", 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "moduleResolution": "NodeNext", 7 | }, 8 | "include": ["src"] 9 | } 10 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "synapse", 3 | "version": "0.0.17", 4 | "bin": "./src/cli/index.ts", 5 | "dependencies": { 6 | "esbuild": "^0.20.2", 7 | "typescript": "~5.5.4", 8 | "postject": "github:Cohesible/postject" 9 | }, 10 | "devDependencies": { 11 | "@types/node": "^20.11.27", 12 | "@cohesible/auth": "file:packages/auth.tgz", 13 | "@cohesible/quotes": "file:packages/quotes.tgz", 14 | "@cohesible/resources": "file:packages/resources.tgz" 15 | }, 16 | "engines": { 17 | "node": "22.1.0" 18 | }, 19 | "scripts": { 20 | "compileSelf": "synapse compile --no-synth && synapse publish --local", 21 | "testFixtures": "synapse run src/testing/internal.ts -- \"$@\"" 22 | }, 23 | "synapse": { 24 | "config": { 25 | "exposeInternal": true, 26 | "target": "local" 27 | }, 28 | "binaryDependencies": { 29 | "node": "https://github.com/Cohesible/node.git", 30 | "terraform": "https://github.com/Cohesible/terraform.git" 31 | }, 32 | "devTools": { 33 | "zig": "0.13.0" 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /packages/auth.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cohesible/synapse/45e07b9e05db839317e74931f165370db6ae9b9c/packages/auth.tgz -------------------------------------------------------------------------------- /packages/quotes.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cohesible/synapse/45e07b9e05db839317e74931f165370db6ae9b9c/packages/quotes.tgz -------------------------------------------------------------------------------- /packages/resources.tgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Cohesible/synapse/45e07b9e05db839317e74931f165370db6ae9b9c/packages/resources.tgz -------------------------------------------------------------------------------- /src/backendClient.ts: -------------------------------------------------------------------------------- 1 | import * as secrets from './services/secrets/index' 2 | import type { BackendClient } from './runtime/modules/core' 3 | import { getExecutionId } from './execution' 4 | import { readState } from './artifacts' 5 | import { getAuthClient } from './auth' 6 | import { getLogger } from './logging' 7 | import { mapResource } from './deploy/deployment' 8 | 9 | type ModulePointer> = T | string 10 | 11 | export type AuthSource = ModulePointer<{ default: (workspace: string, branch?: string) => Credentials | Promise }> 12 | export type LoadedBackendClient = ReturnType 13 | 14 | 15 | function getBootstrapClient() { 16 | const serverAddr = 'http://localhost:8681' 17 | const localClient: BackendClient = { 18 | config: {} as any, 19 | getMissingArtifacts: async () => ({ missing: [] }), 20 | getManifest: async () => ({ artifacts: {} }), 21 | putManifest: async () => {}, 22 | putArtifactsBatch: async () => {}, 23 | getArtifactsBatch: async () => ({}), 24 | getSecret: async (type: string) => { 25 | const envVar = type.toUpperCase().replaceAll('-', '_') 26 | if (process.env[envVar]) { 27 | return { value: process.env[envVar] } 28 | } 29 | 30 | throw new Error(`No secret found: ${type}`) 31 | }, 32 | } as any 33 | 34 | return Object.assign(localClient, { 35 | getState: async (id: string) => { 36 | getLogger().log(`Getting state for resource`, id) 37 | const state = await readState() 38 | const r = state?.resources?.find(x => id === `${x.type}.${x.name}`) 39 | const s = r ? mapResource(r)?.state.attributes : undefined 40 | if (!s) { 41 | return 42 | } 43 | 44 | return r!.type === 'synapse_resource' ? 
s.output.value : s 45 | }, 46 | getToolDownloadUrl: async () => { 47 | return {} as any 48 | }, 49 | getCredentials: async () => ({ 50 | token: '', 51 | expirationEpoch: 0, 52 | }) as any, 53 | config: { 54 | address: serverAddr, 55 | } as any, 56 | }) 57 | } 58 | 59 | function _getClient() { 60 | const identityClient = getAuthClient() 61 | 62 | return { 63 | ...identityClient, 64 | getSecret: secrets.getSecret, 65 | } as any as BackendClient 66 | } 67 | 68 | function _createBackendClient() { 69 | const client = _getClient() 70 | 71 | async function getToolDownloadUrl(type: string, opt?: { os?: string; arch?: string; version?: string }) { 72 | throw new Error('Not implemented') 73 | } 74 | 75 | return Object.assign(client, { 76 | getToolDownloadUrl, 77 | getSecret: (type: string) => { 78 | const envVar = type.toUpperCase().replaceAll('-', '_') 79 | if (process.env[envVar]) { 80 | return { value: process.env[envVar] } 81 | } 82 | 83 | return client.getSecret(type) 84 | } 85 | }) 86 | } 87 | 88 | const clients: Record> = {} 89 | export function getBackendClient(bootstrap = false) { 90 | const k = getExecutionId() 91 | if (clients[k]) { 92 | return clients[k] 93 | } 94 | 95 | if (bootstrap) { 96 | return clients[k] = getBootstrapClient() 97 | } 98 | 99 | try { 100 | return clients[k] = _createBackendClient() 101 | } catch { 102 | return clients[k] = getBootstrapClient() 103 | } 104 | } 105 | 106 | 107 | interface Credentials { 108 | token: string 109 | expirationEpoch: number 110 | } 111 | 112 | -------------------------------------------------------------------------------- /src/build-fs/backup.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { DataRepository, getDataRepository, readJsonRaw, Head } from '../artifacts' 3 | import { getFs, runWithContext, throwIfCancelled } from '../execution' 4 | import { createBlock } from './block' 5 | import { collectAllStats, mergeRepoStats } from './stats' 6 | 7 | export async function createIndexBackup(dest: string) { 8 | const repo = getDataRepository(getFs()) 9 | const indices = new Set() 10 | 11 | async function visitHead(h: Head, source: string) { 12 | indices.add(h.storeHash) 13 | if (h.programHash) { 14 | indices.add(h.programHash) 15 | const { index } = await repo.getBuildFs(h.programHash) 16 | if (index.dependencies) { 17 | Object.values(index.dependencies).forEach(d => indices.add(d)) 18 | } 19 | } 20 | 21 | if (h.previousCommit) { 22 | const commit = await readJsonRaw(repo, h.previousCommit) 23 | await visitHead(commit, source) 24 | } 25 | } 26 | 27 | const stats = await collectAllStats(repo) 28 | const merged = mergeRepoStats(stats) 29 | 30 | const allObjects = new Set([ 31 | ...merged.objects, 32 | ...merged.stores, 33 | ...merged.indices, 34 | ...merged.commits, 35 | ]) 36 | 37 | const data: Record = {} 38 | for (const h of allObjects) { 39 | data[h] = await repo.readData(h) 40 | } 41 | 42 | const block = createBlock(Object.entries(data)) 43 | await getFs().writeFile(dest, block) 44 | 45 | // for (const h of await repo.listHeads()) { 46 | // await visitHead(h, h.id) 47 | // } 48 | 49 | // // XXX: doing this all in-mem = fast way to OOM 50 | // const blocks = new Map() 51 | // for (const h of indices) { 52 | // const data = await repo.serializeBuildFs(await repo.getBuildFs(h)) 53 | // const block = createBlock(Object.entries(data)) 54 | // blocks.set(h, block) 55 | // } 56 | 57 | // console.log([...blocks.values()].reduce((a, b) => a + b.byteLength, 0) / 
(1024 * 1024)) 58 | } -------------------------------------------------------------------------------- /src/build-fs/gcWorker.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs/promises' 2 | import * as path from 'node:path' 3 | import * as child_process from 'node:child_process' 4 | import { getFs, runWithContext } from '../execution' 5 | import { ensureDir, watchForFile } from '../system' 6 | import { getLogger } from '../logging' 7 | import { getBuildDir, getLogsDirectory, getUserSynapseDirectory } from '../workspaces' 8 | import { Bundle } from 'synapse:lib' 9 | import { getGcInfoPath, startGarbageCollection } from './gc' 10 | import { logToStderr } from '../cli/logger' 11 | 12 | async function runGc() { 13 | const buildDir = process.argv[2] 14 | if (!buildDir) { 15 | throw new Error(`No build dir provided`) 16 | } 17 | 18 | logToStderr(getLogger()) 19 | process.send?.({ status: 'ready' }) 20 | await runWithContext({}, () => startGarbageCollection(buildDir)) 21 | await getFs().writeFile(getGcInfoPath(), JSON.stringify({})) 22 | } 23 | 24 | const startFn = new Bundle(runGc, { 25 | immediatelyInvoke: true, 26 | }) 27 | 28 | const getLogsFile = () => path.resolve(getLogsDirectory(), 'gc.log') 29 | 30 | export async function startGcProcess(buildDir: string) { 31 | const logFile = getLogsFile() 32 | await ensureDir(logFile) 33 | const log = await fs.open(logFile, 'w') 34 | const proc = child_process.fork( 35 | startFn.destination, 36 | [buildDir], 37 | { 38 | stdio: ['ignore', log.fd, log.fd, 'ipc'], 39 | detached: true, 40 | } 41 | ) 42 | 43 | await new Promise((resolve, reject) => { 44 | function onMessage(ev: child_process.Serializable) { 45 | if (typeof ev === 'object' && !!ev && 'status' in ev && ev.status === 'ready') { 46 | close() 47 | } 48 | } 49 | 50 | function onExit(code: number | null, signal: NodeJS.Signals | null) { 51 | if (code) { 52 | close(new Error(`Non-zero exit code: ${code}\n logs: ${logFile}`)) 53 | } else if (signal) { 54 | close(new Error(`Received signal to exit: ${signal}`)) 55 | } 56 | close(new Error(`Process exited without sending a message`)) 57 | } 58 | 59 | function close(err?: any) { 60 | if (err) { 61 | reject(err) 62 | } else { 63 | resolve() 64 | } 65 | proc.removeListener('message', onMessage) 66 | proc.removeListener('error', close) 67 | proc.removeListener('error', onExit) 68 | } 69 | 70 | proc.on('message', onMessage) 71 | proc.on('error', close) 72 | proc.on('exit', onExit) 73 | }).finally(() => log.close()) 74 | 75 | proc.unref() 76 | proc.disconnect() 77 | } 78 | -------------------------------------------------------------------------------- /src/build-fs/remote.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { DataRepository, Head } from '../artifacts' 3 | import { getLogger } from '../logging' 4 | import * as projects from '@cohesible/resources/projects' 5 | import { getFs } from '../execution' 6 | import { createBlock, openBlock } from './block' 7 | import { getHash, gunzip, gzip, memoize, sortRecord } from '../utils' 8 | 9 | export interface RemoteArtifactRepository { 10 | // listHeads(): Promise 11 | getHead(id: string): Promise 12 | putHead(head: Head): Promise 13 | pull(storeHash: string): Promise 14 | push(storeHash: string): Promise 15 | getObject(hash: string): Promise 16 | putObject(data: Uint8Array): Promise 17 | } 18 | 19 | export function createRemoteArtifactRepo( 20 | repo: 
DataRepository, 21 | projectId: string 22 | ): RemoteArtifactRepository { 23 | const getClient = memoize(() => projects.createClient()) 24 | 25 | async function putHead(head: Head) { 26 | await getClient().putHead(projectId, { 27 | ...head, 28 | indexHash: head.storeHash, 29 | }) 30 | } 31 | 32 | async function getHead(id: Head['id']): Promise { 33 | const head = await getClient().getHead(projectId, id).catch(e => { 34 | if ((e as any).statusCode !== 404) { 35 | throw e 36 | } 37 | }) 38 | if (!head) { 39 | return 40 | } 41 | 42 | const normalized = sortRecord({ 43 | ...head, 44 | indexHash: undefined, 45 | storeHash: head.indexHash, 46 | }) as any as Head 47 | 48 | if (normalized.commitHash) { 49 | const data = Buffer.from(JSON.stringify({ ...normalized, commitHash: undefined }), 'utf-8') 50 | await repo.writeData(normalized.commitHash, data) 51 | } 52 | 53 | return normalized 54 | } 55 | 56 | async function getObject(hash: string): Promise { 57 | getLogger().debug(`Pulling object`, hash) 58 | 59 | const data = await getClient().getObject(hash, 'object') 60 | await repo.writeData(hash, data) 61 | 62 | return data 63 | } 64 | 65 | async function putObject(data: Uint8Array): Promise { 66 | const hash = getHash(data) 67 | getLogger().debug(`Pushing object`, hash) 68 | await getClient().putObject(hash, Buffer.from(data).toString('base64'), 'object') 69 | 70 | return hash 71 | } 72 | 73 | async function pull(buildFsHash: string): Promise { 74 | if (await repo.hasData(buildFsHash)) { 75 | getLogger().debug(`Skipped pulling index`, buildFsHash) 76 | return 77 | } 78 | 79 | getLogger().debug(`Pulling index`, buildFsHash) 80 | 81 | const block = await getClient().getObject(buildFsHash, 'block').then(gunzip) 82 | const b = openBlock(block) 83 | await Promise.all(b.listObjects().map(h => repo.writeData(h, b.readObject(h)))) 84 | } 85 | 86 | async function push(buildFsHash: string): Promise { 87 | getLogger().debug(`Pushing index`, buildFsHash) 88 | 89 | const buildFs = await repo.getBuildFs(buildFsHash) 90 | const data = await repo.serializeBuildFs(buildFs) 91 | const block = createBlock(Object.entries(data)) 92 | const zipped = await gzip(block) 93 | getLogger().log(`block size (${buildFsHash}): ${zipped.byteLength} compressed; ${block.byteLength} uncompressed`) 94 | 95 | const hash = getHash(zipped) 96 | await getClient().putObject(hash, zipped.toString('base64'), 'block', buildFsHash) 97 | } 98 | 99 | return { 100 | getHead, 101 | putHead, 102 | pull, 103 | push, 104 | getObject, 105 | putObject, 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/build-fs/utils.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { getFs } from '../execution' 3 | import { DataRepository, getDataRepository, getPrefixedPath } from '../artifacts' 4 | 5 | export async function findArtifactByPrefix(repo: DataRepository, prefix: string) { 6 | if (prefix.length === 64) { 7 | return prefix 8 | } 9 | 10 | const prefixedPath = getPrefixedPath(prefix) 11 | const dataDir = repo.getDataDir() 12 | const dirPath = path.resolve(dataDir, path.dirname(prefixedPath)) 13 | const basename = path.basename(prefixedPath) 14 | const matches: string[] = [] 15 | for (const f of await getFs().readDirectory(dirPath)) { 16 | if (f.type === 'file' && f.name.startsWith(basename)) { 17 | matches.push(f.name) 18 | } 19 | } 20 | 21 | if (matches.length > 1) { 22 | throw new Error(`Ambiguous match: 
${matches.join('\n')}`) 23 | } 24 | 25 | if (!matches[0]) { 26 | return 27 | } 28 | 29 | const rem = path.relative(dataDir, dirPath).split('/').join('') 30 | 31 | return rem + matches[0] 32 | } 33 | 34 | export async function getArtifactByPrefix(repo: DataRepository, prefix: string) { 35 | const r = await findArtifactByPrefix(repo, prefix) 36 | if (!r) { 37 | throw new Error(`Object not found: ${prefix}`) 38 | } 39 | 40 | return r 41 | } 42 | 43 | 44 | export function getObjectByPrefix(prefix: string, repo = getDataRepository()) { 45 | if (prefix.startsWith('pointer:')) { 46 | prefix = prefix.slice('pointer:'.length) 47 | } 48 | return getArtifactByPrefix(repo, prefix.replace(/\//g, '')) 49 | } 50 | 51 | export async function getMetadata(repo: DataRepository, target: string) { 52 | const [source, hash] = target.split(':') 53 | 54 | const resolvedSource = await findArtifactByPrefix(repo, source) 55 | const resolvedHash = await findArtifactByPrefix(repo, hash) 56 | 57 | if (!resolvedSource) { 58 | throw new Error(`Did not find source matching hash: ${source}`) 59 | } 60 | 61 | if (!resolvedHash) { 62 | throw new Error(`Did not find object matching hash: ${hash}`) 63 | } 64 | 65 | return repo.getMetadata(resolvedHash, resolvedSource) 66 | } 67 | 68 | -------------------------------------------------------------------------------- /src/build/builder.ts: -------------------------------------------------------------------------------- 1 | 2 | import * as os from 'node:os' 3 | 4 | // Terminology: 5 | // * Project - source code location 6 | // * Package - bundled distributable artifacts 7 | // * Build - a graph node that consumes inputs to produce artifacts 8 | // * Defines - build-time variables 9 | 10 | // Dependencies are always represented as artifacts rather than the producer of said artifacts 11 | // * In some cases, the package is the artifact (e.g. `npm` packages) 12 | 13 | export interface BuildTarget { 14 | mode?: 'debug' | 'release' 15 | os?: string 16 | arch?: string 17 | runtime?: string // Qualifies shared libs e.g. `glibc`, `node`, `browser` are all runtimes 18 | } 19 | 20 | 21 | export type Os = 'linux' | 'darwin' | 'windows' | 'freebsd' 22 | export type Arch = 'aarch64' | 'x64' 23 | 24 | export interface QualifiedBuildTarget { 25 | readonly os: Os 26 | readonly arch: Arch 27 | readonly endianness: 'LE' | 'BE' 28 | readonly libc?: 'musl' | 'gnu' 29 | } 30 | 31 | function parseOs(osType: string): Os { 32 | switch (osType) { 33 | case 'Darwin': 34 | return 'darwin' 35 | case 'Linux': 36 | return 'linux' 37 | case 'Windows_NT': 38 | return 'windows' 39 | 40 | default: 41 | throw new Error(`OS not supported: ${osType}`) 42 | } 43 | } 44 | 45 | function parseArch(arch: string): Arch { 46 | switch (arch) { 47 | case 'arm64': 48 | return 'aarch64' 49 | case 'x64': 50 | return arch 51 | 52 | default: 53 | throw new Error(`Architecture not supported: ${arch}`) 54 | } 55 | } 56 | 57 | export function resolveBuildTarget(target?: Partial): QualifiedBuildTarget { 58 | const _os = target?.os ?? parseOs(os.type()) 59 | const arch = target?.arch ?? parseArch(os.arch()) 60 | const endianness = target?.endianness ?? 
os.endianness() 61 | 62 | return { 63 | ...target, 64 | os: _os, 65 | arch, 66 | endianness, 67 | } 68 | } 69 | 70 | export function toNodeArch(arch: Arch): NodeJS.Architecture { 71 | switch (arch) { 72 | case 'aarch64': 73 | return 'arm64' 74 | 75 | default: 76 | return arch 77 | } 78 | } 79 | 80 | export function toNodePlatform(os: Os): NodeJS.Platform { 81 | switch (os) { 82 | case 'windows': 83 | return 'win32' 84 | 85 | default: 86 | return os 87 | } 88 | } 89 | 90 | export interface CommonParams { 91 | readonly defines?: Record 92 | readonly target?: BuildTarget 93 | } 94 | 95 | export interface BuildSourceParams extends CommonParams { 96 | readonly sourceDir: string 97 | readonly output?: string 98 | } 99 | 100 | -------------------------------------------------------------------------------- /src/build/go.ts: -------------------------------------------------------------------------------- 1 | import { runCommand } from "../utils/process" 2 | import { BuildSourceParams } from "./builder" 3 | 4 | interface Overlay { 5 | Replace: Record 6 | } 7 | 8 | // -C dir <-- changes to this dir, must be first flag 9 | // -race 10 | // -msan 11 | // -asan 12 | // -installsuffix suffix 13 | 14 | // go tool dist list 15 | 16 | type Os = 17 | | 'linux' 18 | | 'darwin' 19 | | 'windows' 20 | | 'solaris' 21 | | 'plan9' 22 | | 'openbsd' 23 | | 'netbsd' 24 | | 'freebsd' 25 | | 'ios' // mobile 26 | | 'android' // mobile 27 | 28 | | 'aix' 29 | | 'illumos' 30 | | 'dragonfly' 31 | 32 | | 'js' // wasm 33 | 34 | type Arch = 35 | | 'arm' 36 | | '386' 37 | | 'arm64' 38 | | 'amd64' 39 | 40 | type BuildMode = 41 | | 'archive' 42 | | 'c-archive' 43 | | 'c-shared' 44 | | 'default' 45 | | 'shared' 46 | | 'exe' 47 | | 'pie' 48 | | 'plugin' 49 | 50 | // CGO_ENABLED=0 51 | 52 | interface RawBuildParams { 53 | // -overlay file 54 | // -pgo file 55 | // -gccgoflags 56 | // -gcflags 57 | // -ldflags 58 | 59 | cwd?: string 60 | cgo?: boolean 61 | os?: string 62 | arch?: string 63 | output?: string // File or directory 64 | moduleMode?: 'readonly' | 'vendor' | 'mod' 65 | ldflags?: string // https://pkg.go.dev/cmd/link 66 | trimpath?: boolean 67 | // asmflags?: string 68 | // compiler?: 'gc' | 'gccgo' 69 | // packages?: string[] 70 | } 71 | 72 | async function runGoBuild(params: RawBuildParams) { 73 | const env: Record = { ...process.env } 74 | if (params.cgo === false) { 75 | env['CGO_ENABLED'] = '0' 76 | } 77 | 78 | if (params.os) { 79 | env['GOOS'] = params.os 80 | } 81 | 82 | if (params.arch) { 83 | env['GOARCH'] = params.arch 84 | } 85 | 86 | const args = ['build'] 87 | if (params.trimpath) { 88 | args.push('-trimpath') 89 | } 90 | 91 | if (params.moduleMode) { 92 | args.push(`-mod`, params.moduleMode) 93 | } 94 | 95 | if (params.ldflags) { 96 | args.push('-ldflags', `"${params.ldflags}"`) 97 | } 98 | 99 | if (params.output) { 100 | args.push('-o', params.output) 101 | } 102 | 103 | await runCommand('go', args, { env, cwd: params.cwd }) 104 | } 105 | 106 | 107 | function resolveParams(params: BuildSourceParams): RawBuildParams { 108 | const res: RawBuildParams = { 109 | cwd: params.sourceDir, 110 | moduleMode: 'readonly', 111 | } 112 | 113 | const ldflags: string[] = [] 114 | 115 | if (params.target) { 116 | if (params.target.mode === 'release') { 117 | ldflags.push('-s', '-w') 118 | res.trimpath = true 119 | res.cgo = false 120 | } 121 | 122 | res.os = params.target.os 123 | res.arch = params.target.arch 124 | if (res.arch === 'aarch64') { 125 | res.arch = 'arm64' 126 | } else if (res.arch === 'x64') { 127 | res.arch = 
'amd64' 128 | } 129 | } 130 | 131 | if (params.defines) { 132 | for (const [k, v] of Object.entries(params.defines)) { 133 | ldflags.push('-X', `'${k}=${v}'`) 134 | } 135 | } 136 | 137 | if (ldflags.length > 0) { 138 | res.ldflags = ldflags.join(' ') 139 | } 140 | 141 | if (params.output) { 142 | res.output = params.output 143 | } 144 | 145 | return res 146 | } 147 | 148 | export async function buildGoProgram(params: BuildSourceParams) { 149 | const resolved = resolveParams(params) 150 | await runGoBuild(resolved) 151 | } -------------------------------------------------------------------------------- /src/build/sources.ts: -------------------------------------------------------------------------------- 1 | import * as git from '../git' 2 | import * as path from 'node:path' 3 | import { getFs } from '../execution' 4 | import { getUserSynapseDirectory } from '../workspaces' 5 | 6 | const getSourcesDirs = () => path.resolve(getUserSynapseDirectory(), 'build', 'sources') 7 | const getPkgName = (url: string) => url.replace(/^https?:\/\//, '').replace(/\.git$/, '') 8 | 9 | interface GitSource { 10 | readonly type: 'git' 11 | readonly url: string 12 | readonly commitish: string 13 | } 14 | 15 | export async function downloadSource(source: GitSource) { 16 | const dest = path.resolve(getSourcesDirs(), getPkgName(source.url), source.commitish) 17 | const fs = getFs() 18 | if (await fs.fileExists(dest)) { 19 | await git.fetchOriginHead(dest, source.commitish) 20 | 21 | return dest 22 | } 23 | 24 | await git.fetchRepo(dest, source.url, source.commitish) 25 | 26 | return dest 27 | } 28 | 29 | -------------------------------------------------------------------------------- /src/cli/completions/completion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if type complete &>/dev/null; then 4 | _synapse_completion () { 5 | local words cword 6 | if type _get_comp_words_by_ref &>/dev/null; then 7 | _get_comp_words_by_ref -n = -n @ -n : -w words -i cword 8 | else 9 | cword="$COMP_CWORD" 10 | words=("${COMP_WORDS[@]}") 11 | fi 12 | 13 | local si="$IFS" 14 | if ! IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \ 15 | COMP_LINE="$COMP_LINE" \ 16 | COMP_POINT="$COMP_POINT" \ 17 | synapse completion -- "${words[@]}" \ 18 | 2>/dev/null)); then 19 | local ret=$? 20 | IFS="$si" 21 | return $ret 22 | fi 23 | IFS="$si" 24 | if type __ltrim_colon_completions &>/dev/null; then 25 | __ltrim_colon_completions "${words[cword]}" 26 | fi 27 | } 28 | complete -o default -F _synapse_completion synapse 29 | elif type compdef &>/dev/null; then # zsh 30 | _synapse_completion() { 31 | compadd -- $(COMP_CWORD=$((CURRENT-1)) \ 32 | COMP_LINE=$BUFFER \ 33 | COMP_POINT=0 \ 34 | synapse completion -- "${words[@]}" \ 35 | 2>/dev/null) 36 | } 37 | compdef _synapse_completion synapse 38 | elif type compctl &>/dev/null; then 39 | _synapse_completion () { 40 | local cword line point words si 41 | read -Ac words 42 | read -cn cword 43 | let cword-=1 44 | read -l line 45 | read -ln point 46 | si="$IFS" 47 | if ! IFS=$'\n' reply=($(COMP_CWORD="$cword" \ 48 | COMP_LINE="$line" \ 49 | COMP_POINT="$point" \ 50 | synapse completion -- "${words[@]}" \ 51 | 2>/dev/null)); then 52 | 53 | local ret=$? 
54 | IFS="$si" 55 | return $ret 56 | fi 57 | IFS="$si" 58 | } 59 | compctl -K _synapse_completion synapse 60 | fi 61 | -------------------------------------------------------------------------------- /src/codegen/openapiHelpers.ts: -------------------------------------------------------------------------------- 1 | type ParameterLocation = { 2 | readonly in?: "path"; 3 | readonly style?: "matrix" | "label" | "simple"; 4 | readonly required: true; 5 | readonly 'x-multi-segment'?: boolean 6 | } | { 7 | readonly in?: "query"; 8 | readonly style?: "form" | "spaceDelimited" | "pipeDelimited" | "deepObject"; 9 | readonly required?: boolean; 10 | readonly 'x-multi-segment'?: boolean 11 | readonly deprecated?: boolean 12 | 13 | } | { 14 | readonly in?: "header"; 15 | readonly style?: "simple"; 16 | } | { 17 | readonly in?: "cookie"; 18 | readonly style?: "form"; 19 | }; 20 | 21 | type Param = ParameterLocation & { 22 | name: string 23 | } 24 | 25 | function buildRequest(pathTemplate: string, params: Param[], req: Record) { 26 | const query = new URLSearchParams() 27 | for (const p of params) { 28 | const val = req[p.name] 29 | if (val === undefined) { 30 | if ((p as any).required) { 31 | throw new Error(`Missing parameter: ${p.name}`) 32 | } 33 | continue 34 | } 35 | 36 | switch (p.in) { 37 | case 'path': 38 | pathTemplate = pathTemplate.replace(`{${p.name}}`, val) 39 | continue 40 | 41 | case 'query': 42 | if (p.style) { 43 | throw new Error(`Query style not implemented: ${p.style}`) 44 | } 45 | query.set(p.name, val) 46 | continue 47 | 48 | default: 49 | throw new Error(`Param type not implemented: ${p.in}`) 50 | } 51 | } 52 | 53 | const queryStr = query.size > 0 ? `?${query.toString()}` : '' 54 | 55 | return { 56 | path: `${pathTemplate}${queryStr}`, 57 | body: req.body, 58 | } 59 | } 60 | 61 | async function sendRequest( 62 | baseUrl: string, 63 | method: string, 64 | pathTemplate: string, 65 | params: Param[], 66 | req: Record, 67 | authorization?: string | (() => string | Promise) 68 | ) { 69 | const built = buildRequest(pathTemplate, params, req) 70 | const headers: Record = { 'user-agent': 'synapse' } 71 | const auth = typeof authorization === 'string' ? 
authorization : await authorization?.() 72 | if (auth) { 73 | headers['authorization'] = auth 74 | } 75 | 76 | const url = new URL(built.path, baseUrl) 77 | 78 | return fetch(url, { 79 | headers, 80 | method, 81 | body: built.body, 82 | }) 83 | } 84 | -------------------------------------------------------------------------------- /src/deploy/state.ts: -------------------------------------------------------------------------------- 1 | // TODO: move relevant util functions to this file 2 | 3 | export interface TfResourceInstance { 4 | status?: 'tainted' 5 | schema_version: number 6 | attributes: Record 7 | private?: string 8 | create_before_destroy?: boolean 9 | dependencies?: string[] 10 | sensitive_attributes?: { 11 | type: 'get_attr' 12 | value: string 13 | }[] 14 | } 15 | 16 | export interface TfResourceOld { 17 | type: string 18 | name: string 19 | provider: string 20 | instances: TfResourceInstance[] 21 | } 22 | 23 | export interface TfResource { 24 | type: string 25 | name: string 26 | provider: string 27 | state: TfResourceInstance 28 | } 29 | 30 | export interface TfStateOld { 31 | version: number 32 | serial: number 33 | lineage: string 34 | resources: TfResourceOld[] 35 | } 36 | 37 | export interface TfState { 38 | version: number 39 | serial: number 40 | lineage: string 41 | resources: TfResource[] 42 | } 43 | 44 | export interface AnnotatedTfState extends TfState { 45 | serial: number 46 | lineage: string 47 | } 48 | -------------------------------------------------------------------------------- /src/events.ts: -------------------------------------------------------------------------------- 1 | // FIXME: `default` imports don't work correctly for cjs bundles 2 | // import EventEmitter from 'node:events' 3 | import { EventEmitter } from 'node:events' 4 | export { EventEmitter } 5 | 6 | export interface Disposable { 7 | dispose: () => void 8 | } 9 | 10 | export interface Event { 11 | fire(...args: T): void 12 | on(listener: (...args: T) => void): Disposable 13 | } 14 | 15 | export function createEventEmitter() { 16 | return new EventEmitter() 17 | } 18 | 19 | const listenerSymbol = Symbol('listener') 20 | 21 | interface ListenerEvent { 22 | readonly eventName: string | symbol 23 | readonly mode: 'added' | 'removed' 24 | } 25 | 26 | export function addMetaListener(emitter: EventEmitter, listener: (ev: ListenerEvent) => void) { 27 | emitter.on(listenerSymbol, listener) 28 | 29 | return { dispose: () => emitter.removeListener(listenerSymbol, listener) } 30 | } 31 | 32 | export type EventEmitter2 = (listener: (ev: T) => void) => Disposable 33 | 34 | export function createEvent(emitter: EventEmitter, type: U): Event { 35 | return { 36 | fire: (...args) => emitter.emit(type, ...args), 37 | on: listener => { 38 | emitter.on(type, listener as any) 39 | emitter.emit(listenerSymbol, { eventName: type, mode: 'added' }) 40 | 41 | function dispose() { 42 | emitter.removeListener(type, listener as any) 43 | emitter.emit(listenerSymbol, { eventName: type, mode: 'removed' }) 44 | } 45 | 46 | return { dispose } 47 | }, 48 | } 49 | } 50 | 51 | export function once(event: Event, fn: (...args: T) => void): Disposable { 52 | const d = event.on((...args) => { 53 | d.dispose() 54 | fn(...args) 55 | }) 56 | 57 | return d 58 | } 59 | -------------------------------------------------------------------------------- /src/pm/attestations.ts: -------------------------------------------------------------------------------- 1 | // Variant of https://github.com/secure-systems-lab/dsse using UTF-8 for the 
payload 2 | 3 | export interface Envelope { 4 | readonly payload: string // utf-8 5 | readonly payloadType: string 6 | readonly signatures: { 7 | readonly keyid: string 8 | readonly sig: string // base64url 9 | }[] 10 | } 11 | 12 | export interface KeyPair { 13 | readonly id: string 14 | sign(data: Buffer): Promise 15 | verify(data: Buffer, sig: Buffer): Promise 16 | } 17 | 18 | function createHeader(envelope: Omit) { 19 | const data = Buffer.from(envelope.payload, 'utf-8') 20 | const type = Buffer.from(envelope.payloadType, 'utf-8') 21 | 22 | return Buffer.concat([ 23 | Buffer.from(String(type.byteLength) + ' ', 'utf-8'), 24 | type, 25 | Buffer.from(' ' + String(data.byteLength) + ' ', 'utf-8'), 26 | data, 27 | ]) 28 | } 29 | 30 | export async function sign(envelope: Omit, key: Pick) { 31 | const header = createHeader(envelope) 32 | const sig = Buffer.from(await key.sign(header)).toString('base64url') 33 | 34 | return { keyid: key.id, sig } 35 | } 36 | 37 | export async function verify(envelope: Envelope, key: Pick) { 38 | const signature = envelope.signatures[0] 39 | if (!signature) { 40 | throw new Error(`Envelope is missing a signature`) 41 | } 42 | 43 | if (signature.keyid !== key.id) { 44 | throw new Error(`Found different key ids: ${signature.keyid} !== ${key.id}`) 45 | } 46 | 47 | const header = createHeader(envelope) 48 | 49 | return key.verify(header, Buffer.from(signature.sig, 'base64url')) 50 | } -------------------------------------------------------------------------------- /src/runtime/env.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { getWorkingDir } from '../workspaces' 3 | import { getBuildTarget } from '../execution' 4 | import { getLogger } from '../logging' 5 | import { Fs } from '../system' 6 | import { throwIfNotFileNotFoundError } from '../utils' 7 | 8 | // Maybe do backticks too 9 | function unquote(str: string) { 10 | const isSingleQuoted = str[0] === "'" && str.at(-1) === "'" 11 | const isDoubleQuoted = !isSingleQuoted && str[0] === '"' && str.at(-1) === '"' 12 | if (isSingleQuoted || isDoubleQuoted) { 13 | return str.slice(1, -1) 14 | } 15 | 16 | return str 17 | } 18 | 19 | export function parseEnvFile(text: string) { 20 | const result: Record = {} 21 | 22 | const lines = text.split(/\r?\n/) 23 | for (const l of lines) { 24 | const sep = l.indexOf('=') 25 | if (sep === -1) { 26 | // bad parse 27 | continue 28 | } 29 | 30 | const key = l.slice(0, sep).trimEnd() 31 | const value = l.slice(sep + 1).trimStart() 32 | result[key] = unquote(value) 33 | } 34 | 35 | return result 36 | } 37 | 38 | export function getCurrentEnvFilePath() { 39 | const environment = getBuildTarget()?.environmentName 40 | const suffix = environment ? `.${environment}` : '' 41 | 42 | return path.resolve(getWorkingDir(), `.env${suffix}`) 43 | } 44 | 45 | // TODO: this isn't clean 46 | const cachedEnvVars = new Map>() 47 | export async function maybeLoadEnvironmentVariables(fs: Pick) { 48 | const filePath = getCurrentEnvFilePath() 49 | if (cachedEnvVars.has(filePath)) { 50 | return cachedEnvVars.get(filePath)! 
51 | } 52 | 53 | getLogger().debug(`Trying to load environment variables from "${filePath}"`) 54 | 55 | const text = await fs.readFile(filePath, 'utf-8').catch(throwIfNotFileNotFoundError) 56 | if (!text) { 57 | return 58 | } 59 | 60 | const vars = parseEnvFile(text) 61 | cachedEnvVars.set(filePath, vars) 62 | for (const k of Object.keys(vars)) { 63 | process.env[k] = vars[k] 64 | } 65 | 66 | getLogger().debug(`Loaded environment variables: ${Object.keys(vars)}`) 67 | 68 | return vars 69 | } 70 | -------------------------------------------------------------------------------- /src/runtime/modules/reify.ts: -------------------------------------------------------------------------------- 1 | //@internal 2 | //# moduleId = synapse:reify 3 | 4 | import type { Schema, TypedObjectSchema, TypedArraySchema, TypedStringSchema, TypedNumberSchema } from 'synapse:validation' 5 | 6 | 7 | export declare function schema(): never 8 | export declare function schema(): TypedArraySchema 9 | export declare function schema(): TypedObjectSchema 10 | export declare function schema(): TypedStringSchema 11 | export declare function schema(): TypedNumberSchema 12 | 13 | /** @internal */ 14 | export function __schema(obj: any): any { 15 | return obj 16 | } 17 | 18 | // export declare function check(val: unknown): asserts val is T 19 | -------------------------------------------------------------------------------- /src/runtime/rootLoader.ts: -------------------------------------------------------------------------------- 1 | import * as esbuild from 'esbuild' 2 | import { readFileSync, existsSync } from 'node:fs' 3 | import { DataRepository, getDataRepository } from '../artifacts' 4 | import { getBuildTarget, getFs } from '../execution' 5 | import { Fs, SyncFs } from '../system' 6 | import { getV8CacheDirectory } from '../workspaces' 7 | import { createModuleLoader, BasicDataRepository } from './loader' 8 | import { ModuleResolver, createModuleResolver } from './resolver' 9 | import { createCodeCache } from './utils' 10 | import { setupEsbuild } from '../bundler' 11 | 12 | function toBuffer(arr: Uint8Array): Buffer { 13 | return Buffer.isBuffer(arr) ? arr : Buffer.from(arr) 14 | } 15 | 16 | export function createBasicDataRepo(repo: DataRepository): BasicDataRepository { 17 | function getDataSync(hash: string): Buffer 18 | function getDataSync(hash: string, encoding: BufferEncoding): string 19 | function getDataSync(hash: string, encoding?: BufferEncoding) { 20 | const data = toBuffer(repo.readDataSync(hash)) 21 | 22 | return encoding ? 
data.toString(encoding) : data 23 | } 24 | 25 | return { getDataSync, getMetadata: repo.getMetadata } 26 | } 27 | 28 | export function createModuleResolverForBundling(fs: Fs & SyncFs, workingDirectory: string): ModuleResolver { 29 | const resolver = createModuleResolver(fs, workingDirectory) 30 | 31 | // Need to patch this file because it's not compatible w/ bundling to ESM 32 | resolver.registerPatch({ 33 | name: '@aws-crypto/util', 34 | // version: 3.0.0 35 | files: { 36 | 'build/convertToBuffer.js': contents => contents.replace('require("@aws-sdk/util-utf8-browser")', '{}') 37 | } 38 | }) 39 | 40 | return resolver 41 | } 42 | 43 | function loadEsbuildWithWorkersDisabled() { 44 | process.env['ESBUILD_WORKER_THREADS'] = '0' 45 | setupEsbuild() 46 | delete process.env['ESBUILD_WORKER_THREADS'] 47 | } 48 | 49 | function createTypescriptLoader() { 50 | return (fileName: string, format: 'cjs' | 'esm' = 'cjs') => { 51 | loadEsbuildWithWorkersDisabled() 52 | 53 | // TODO: add option to configure sourcemap 54 | // TODO: add transform cache to avoid calls to `esbuild` 55 | const contents = readFileSync(fileName) 56 | const res = esbuild.transformSync(contents, { format, loader: 'ts', sourcemap: 'inline' }) 57 | 58 | return res.code 59 | } 60 | } 61 | 62 | export function createMinimalLoader(useTsLoader = false) { 63 | const bt = getBuildTarget() 64 | const workingDirectory = bt?.workingDirectory ?? process.cwd() 65 | const codeCache = createCodeCache(getFs(), getV8CacheDirectory()) 66 | const typescriptLoader = useTsLoader ? createTypescriptLoader() : undefined 67 | 68 | const loader = createModuleLoader( 69 | { readFileSync }, 70 | workingDirectory, 71 | createModuleResolver({ readFileSync, fileExistsSync: existsSync }, workingDirectory, useTsLoader), 72 | { 73 | codeCache, 74 | workingDirectory, 75 | typescriptLoader, 76 | dataRepository: bt ? createBasicDataRepo(getDataRepository(getFs(), bt.buildDir)) : undefined, 77 | useThisContext: true, 78 | } 79 | ) 80 | 81 | function loadModule(id: string, origin?: string) { 82 | if (id.endsWith('.mjs')) { 83 | return loader.loadEsm(id, origin) 84 | } 85 | 86 | return loader.loadCjs(id, origin) 87 | } 88 | 89 | return { loadModule } 90 | } -------------------------------------------------------------------------------- /src/runtime/srl/README.md: -------------------------------------------------------------------------------- 1 | ## Introduction 2 | 3 | The Standard Resource Library (SRL) is a collection of interfaces and utility functions that abstract away services provided by cloud providers. Common interfaces facilitate creating "cloud-agnostic" applications, which can be deployed to any cloud provider. 4 | 5 | Having a standard right from the start, even if minimal, is intended to at least get the industry thinking about standardization. In all liklihood, standardization of cloud technology will take many years or even decades. As such, incremental standardization is a key requirement of the SRL. 6 | 7 | 8 | ### Balancing applicability and flexibility (WIP) 9 | 10 | All abstractions come with a tradeoff: the more general something becomes, the fewer "free variables" can be changed by producers or relied on by consumers. Abstractions fundamentally restrict the information space in the domain they are applied. Information restrictions impede customization; the less I know about something, the less I can change. 
So an ideal abstraction is one that minimizes information restriction while maximizing the number of consumers who can reliably use the abstraction. 11 | 12 | For resources, this means allowing for inputs that do not fundamentally change the resulting interface. I should be able to use a resource the same way regardless of how it was created. Here are a few guidelines for creating such interfaces: 13 | 14 | * Prefer generic (parameterized) interfaces 15 | * Minimize the number of methods and properties 16 | * When using discrete outputs: 17 | * Avoid "optional" (nullable) fields 18 | * Keep the result of methods as simple as possible 19 | * Avoid "convenience" methods, especially ones that create resources 20 | * Prefer creating a separate function/resource declaration instead 21 | * Avoid optional parameters that only change the output shape, not behavior 22 | * Prefer creating utility functions instead. Documentation should have examples using the functions to increase visibility. 23 | 24 | 25 | In practice, using a resource "the same way" has many asterisks attached. For example, a resource from one cloud provider may be able to tolerate much higher workloads compared to another one. This can cause all sorts of problems in applications that depend on specific timings or performance characteristics. 26 | 27 | 28 | ### Per-vendor customizations 29 | Taking inspiration from CSS vendor parameters, all SRL resources can be customized on a per-vendor basis. The idea is that this would allow "progressive enhancement" of applications, exposing additional features to the operator. Such things might include logging, analytics, or premium features that would otherwise be undesirable in the baseline implementation. 30 | 31 | 32 | ### Compliance Checking 33 | Every resource interface has a baseline test suite that can be used to test for compliance. The nature of integrations means that it is up to the _integrator_ to run these tests. In the future, compliance checking could be centralized to give users more confidence in the libraries they're using. 34 | 35 | These tests _only_ check that the implementation adheres to the interface; additional characteristics such as performance are not tested. Standardized performance tests are a possible future addition to the SRL. 36 | 37 | 38 | ### Governance (WIP) 39 | Ideally, the SRL would be governed by an international standards organization. Until then, Cohesible will have stewardship over the SRL. -------------------------------------------------------------------------------- /src/runtime/srl/index.ts: -------------------------------------------------------------------------------- 1 | //# moduleId = synapse:srl 2 | 3 | export * as net from './net' 4 | export * as compute from './compute' 5 | export * as storage from './storage' 6 | 7 | //# resource = true 8 | export declare class Provider { 9 | constructor(props?: any) 10 | } 11 | 12 | // Re-exporting common resources so they're easier to find 13 | // export { Function, HttpService } from './compute' 14 | // export { Bucket, Queue, Table, Counter } from './storage' 15 | -------------------------------------------------------------------------------- /src/runtime/srl/net/index.ts: -------------------------------------------------------------------------------- 1 | //# moduleId = synapse:srl/net 2 | 3 | //--------------- NETWORKING ---------------// 4 | 5 | interface RouteProps { 6 | readonly type?: 'ipv4' | 'ipv6' 7 | readonly destination: string 8 | // LoadBalancer?
9 | } 10 | 11 | // creates a route within a network 12 | //# resource = true 13 | /** @internal */ 14 | export declare class Route { 15 | constructor(network: RouteTable | Network, props: RouteProps) 16 | } 17 | 18 | //# resource = true 19 | /** @internal */ 20 | export declare class RouteTable {} 21 | 22 | interface SubnetProps { 23 | readonly cidrBlock?: string 24 | readonly ipv6CidrBlock?: string 25 | } 26 | 27 | //# resource = true 28 | /** @internal */ 29 | export declare class Subnet { 30 | constructor(network: Network, props?: SubnetProps) 31 | } 32 | 33 | //# resource = true 34 | /** @internal */ 35 | export declare class Network { 36 | readonly subnets: Subnet[] 37 | } 38 | 39 | //# resource = true 40 | /** @internal */ 41 | export declare class InternetGateway { 42 | constructor() 43 | } 44 | 45 | // For east-west traffic (IGW is north-south) 46 | //# resource = true 47 | /** @internal */ 48 | export declare class TransitGateway { 49 | constructor() 50 | 51 | public addNetwork(network: Network): void 52 | } 53 | 54 | // for AWS these are always 'allow' 55 | /** @internal */ 56 | export interface NetworkRuleProps { 57 | readonly type: 'ingress' | 'egress' 58 | readonly priority?: number 59 | readonly protocol: 'icmp' | 'tcp' | 'udp' | number // IPv4 protocol id 60 | // TODO: icmp has extra settings for AWS 61 | readonly port: number | [number, number] // only applicable to L4 protocols 62 | // source or destination 63 | readonly target: string // can be _a lot_ of different things 64 | } 65 | 66 | //# resource = true 67 | /** @internal */ 68 | export declare class NetworkRule { 69 | constructor(network: Network, props: NetworkRuleProps) 70 | } 71 | 72 | //# resource = true 73 | /** @internal */ 74 | export declare class Firewall {} 75 | 76 | //# resource = true 77 | export declare class HostedZone { 78 | readonly name: string 79 | constructor(name: string) 80 | createSubdomain(name: string): HostedZone 81 | 82 | //# resource = true 83 | createRecord(record: ResourceRecord): void 84 | } 85 | 86 | interface NsRecord { 87 | type: 'NS' 88 | name: string 89 | ttl: number 90 | value: string 91 | } 92 | 93 | interface AliasRecord { 94 | type: 'A' 95 | name: string 96 | ttl: number 97 | value: string 98 | } 99 | 100 | interface CNameRecord { 101 | type: 'CNAME' 102 | name: string 103 | ttl: number 104 | value: string 105 | } 106 | 107 | export type ResourceRecord = CNameRecord | NsRecord | AliasRecord 108 | -------------------------------------------------------------------------------- /src/runtime/srl/websites.ts: -------------------------------------------------------------------------------- 1 | ///# moduleId = synapse:srl/websites 2 | 3 | import { HttpHandler, HttpFetcher, CapturedPattern } from 'synapse:http' 4 | import { HostedZone } from 'synapse:srl/net' 5 | 6 | 7 | export interface JSXElement< 8 | P = any, 9 | T extends string | JSXElementConstructor

<any> = string | JSXElementConstructor<any> 10 | > { 11 | type: T 12 | props: P 13 | key: string | null 14 | } 15 | 16 | export type JSXNode<P = any> = JSXElement<P> | Iterable<JSXNode<P>> 17 | 18 | type JSXElementConstructor<P> = ((props: P) => JSXNode<any>) | (new (props: P) => Component) 19 | 20 | interface Component { 21 | render(): JSXElement 22 | } 23 | 24 | type FunctionComponent<P = any, C = any, U = JSXNode> = (props: P, context?: C) => U 25 | // type ComponentType<P = any> = (new (props: P) => Component) | FunctionComponent<P> 26 | type ComponentType<P = any>
    = FunctionComponent 27 | 28 | export interface Layout { 29 | readonly parent?: Layout 30 | // readonly stylesheet?: Stylesheet 31 | readonly component: ComponentType<{ children: JSXNode }> 32 | } 33 | 34 | export interface Page = {}, U = any> { 35 | readonly layout: Layout 36 | readonly component: ComponentType 37 | } 38 | 39 | export interface RouteablePage = {}> extends Page { 40 | readonly route: string 41 | } 42 | 43 | //# resource = true 44 | export declare class Website { 45 | readonly url: string 46 | constructor(options?: { domain?: HostedZone }) 47 | 48 | addAsset(source: string, name?: string, contentType?: string): string 49 | 50 | addPage(route: T, page: Page>): RouteablePage> 51 | addPage(route: T, page: Page, U>, context: U): RouteablePage> 52 | 53 | // XXX: having the `string` overload makes things work correctly ??? 54 | addHandler(route: T, handler: HttpHandler): HttpFetcher 55 | addHandler(route: T, handler: HttpHandler): HttpFetcher 56 | 57 | bind(handler: (...args: T) => Promise | U): (...args: T) => Promise 58 | } 59 | -------------------------------------------------------------------------------- /src/runtime/utils.ts: -------------------------------------------------------------------------------- 1 | import * as vm from 'node:vm' 2 | import * as path from 'node:path' 3 | import { SyncFs } from '../system' 4 | import { throwIfNotFileNotFoundError } from '../utils' 5 | 6 | export interface Context { 7 | readonly vm: vm.Context 8 | readonly globals: typeof globalThis 9 | } 10 | 11 | export function copyGlobalThis(): Context { 12 | const ctx = vm.createContext() 13 | const globals = vm.runInContext('this', ctx) 14 | const keys = new Set(Object.getOwnPropertyNames(globals)) 15 | 16 | const propDenyList = new Set([ 17 | 'crypto', // Node adds a prop that throws if in a different context 18 | 'console', // We want to add our own 19 | ]) 20 | 21 | const descriptors = Object.getOwnPropertyDescriptors(globalThis) 22 | for (const k of Object.keys(descriptors)) { 23 | if (!keys.has(k) && !propDenyList.has(k)) { 24 | Object.defineProperty(globals, k, descriptors[k]) 25 | } 26 | } 27 | 28 | globals.ArrayBuffer = ArrayBuffer 29 | // Needed for `esbuild` 30 | globals.Uint8Array = Uint8Array 31 | 32 | globals.console = globalThis.console 33 | 34 | Object.defineProperty(globals, 'crypto', { 35 | value: globalThis.crypto, 36 | writable: false, 37 | configurable: false, 38 | }) 39 | 40 | return { vm: ctx, globals } 41 | } 42 | 43 | export type CodeCache = ReturnType 44 | export function createCodeCache(fs: Pick, cacheDir: string) { 45 | function getCachedData(key: string): Buffer | undefined { 46 | const filePath = path.resolve(cacheDir, key) 47 | 48 | try { 49 | const d = fs.readFileSync(filePath) 50 | 51 | return Buffer.isBuffer(d) ? 
d : Buffer.from(d) 52 | } catch (e) { 53 | throwIfNotFileNotFoundError(e) 54 | } 55 | } 56 | 57 | function setCachedData(key: string, data: Uint8Array) { 58 | const filePath = path.resolve(cacheDir, key) 59 | fs.writeFileSync(filePath, data) 60 | } 61 | 62 | function evictCachedData(key: string) { 63 | const filePath = path.resolve(cacheDir, key) 64 | fs.deleteFileSync(filePath) 65 | } 66 | 67 | return { getCachedData, setCachedData, evictCachedData } 68 | } 69 | 70 | -------------------------------------------------------------------------------- /src/services/analytics/deviceId.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { getFs } from '../../execution' 3 | import { getHash } from '../../utils' 4 | import { createMemento } from '../../utils/memento' 5 | import { runCommand } from '../../utils/process' 6 | import { randomUUID } from 'node:crypto' 7 | import { getUserSynapseDirectory } from '../../workspaces' 8 | 9 | // https://github.com/denisbrodbeck/machineid 10 | 11 | async function getDarwinId() { 12 | const res = await runCommand('/usr/sbin/ioreg', ['-rd1', '-c', 'IOPlatformExpertDevice']) 13 | const m = res.match(/"IOPlatformUUID" = "([0-9-]+)"/) 14 | if (!m) { 15 | return 16 | } 17 | 18 | return getHash(m[1]) 19 | } 20 | 21 | async function getLinuxId() { 22 | const d = await getFs().readFile('/var/lib/dbus/machine-id', 'utf-8').catch(e => { 23 | return getFs().readFile('/etc/machine-id', 'utf-8').catch(e => {}) 24 | }) 25 | 26 | if (!d) { 27 | return 28 | } 29 | 30 | return getHash(d.trim()) 31 | } 32 | 33 | async function getMachineId() { 34 | switch (process.platform) { 35 | case 'darwin': 36 | return getDarwinId() 37 | case 'linux': 38 | return getLinuxId() 39 | } 40 | } 41 | 42 | let deviceId: string 43 | async function _getDeviceId() { 44 | const memento = createMemento(getFs(), path.resolve(getUserSynapseDirectory(), 'memento')) 45 | const deviceId = await memento.get('deviceId') 46 | if (deviceId) { 47 | return deviceId 48 | } 49 | 50 | const machineId = await getMachineId() 51 | if (machineId) { 52 | await memento.set('deviceId', machineId) 53 | 54 | return machineId 55 | } 56 | 57 | const newDeviceId = getHash(randomUUID()) 58 | await memento.set('deviceId', newDeviceId) 59 | 60 | return newDeviceId 61 | } 62 | 63 | export async function getDeviceId() { 64 | return deviceId ??= await _getDeviceId() 65 | } 66 | 67 | async function approximateProjectId() { 68 | // 1. Get the current root dir 69 | // 2. Remove prefixes e.g. home dir 70 | // 3. 
Hash it 71 | } -------------------------------------------------------------------------------- /src/services/analytics/index.ts: -------------------------------------------------------------------------------- 1 | import { getLogger } from '../../logging' 2 | import { readKey } from '../../cli/config' 3 | import { memoize } from '../../utils' 4 | import { AnalyticsEvent } from './backend' 5 | import { connect } from './daemon' 6 | 7 | const pendingEvents = new Set>() 8 | 9 | export function emitEvent(ev: AnalyticsEvent) { 10 | if (isAnalyticsDisabledByEnv()) { 11 | return 12 | } 13 | 14 | const p = _emit(ev).finally(() => pendingEvents.delete(p)) 15 | pendingEvents.add(p) 16 | } 17 | 18 | export function eagerlyStartDaemon() { 19 | getDaemon() 20 | } 21 | 22 | async function _emit(ev: AnalyticsEvent) { 23 | const daemon = await getDaemon() 24 | 25 | return daemon?.sendAnalytics([ev]) 26 | } 27 | 28 | // Sync check is kept separate so we can skip creating promises 29 | const isAnalyticsDisabledByEnv = memoize(() => { 30 | if (process.env['DO_NOT_TRACK'] && process.env['DO_NOT_TRACK'] !== '0') { 31 | return true 32 | } 33 | 34 | if (process.env['SYNAPSE_NO_ANALYTICS'] && process.env['SYNAPSE_NO_ANALYTICS'] !== '0') { 35 | return true 36 | } 37 | }) 38 | 39 | const isAnalyticsDisabled = memoize(async () => { 40 | return isAnalyticsDisabledByEnv() || (await readKey('cli.analytics')) === false 41 | }) 42 | 43 | const getDaemon = memoize(async () => { 44 | if (await isAnalyticsDisabled()) { 45 | return 46 | } 47 | 48 | try { 49 | return await connect() 50 | } catch (e) { 51 | if (!(e as any).message.includes('has not been deployed')) { 52 | getLogger().error(e) 53 | } 54 | } 55 | }) 56 | 57 | export async function shutdown() { 58 | const start = performance.now() 59 | 60 | if (pendingEvents.size === 0) { 61 | if (getDaemon.cached) { 62 | (await getDaemon())?.dispose() 63 | } 64 | return 65 | } 66 | 67 | const timer = setTimeout(async () => { 68 | if (getDaemon.cached) { 69 | getLogger().warn(`Forcibly destroying analytics socket`) 70 | ;(await getDaemon())?.destroySocket() 71 | } 72 | }, 100).unref() 73 | 74 | await Promise.all(pendingEvents) 75 | ;(await getDaemon())?.dispose() 76 | clearTimeout(timer) 77 | getLogger().debug(`analytics shut down time: ${Math.floor((performance.now() - start) * 100) / 100}ms`) 78 | } 79 | 80 | interface CommandEventAttributes { 81 | readonly name: string 82 | readonly duration: number 83 | readonly errorCode?: string 84 | // cliVersion 85 | // OS 86 | // arch 87 | // maybe shell 88 | } 89 | 90 | interface CommandEvent extends AnalyticsEvent { 91 | readonly type: 'command' 92 | readonly attributes: CommandEventAttributes 93 | } 94 | 95 | export function emitCommandEvent(attributes: CommandEventAttributes) { 96 | emitEvent({ 97 | type: 'command', 98 | timestamp: new Date().toISOString(), 99 | attributes, 100 | } satisfies CommandEvent) 101 | } 102 | 103 | const legalNotice = ` 104 | The Synapse CLI collects anonymous usage data. You can opt-out by running the command \`syn config synapse.cli.analytics false\`. 
105 | 106 | For more information on what is collected, see the following documentation: 107 | ` -------------------------------------------------------------------------------- /src/services/secrets/inmem.ts: -------------------------------------------------------------------------------- 1 | import { ServiceProvider, getServiceRegistry } from '../../deploy/registry' 2 | import type { SecretProviderProps } from '../../runtime/modules/core' 3 | import { memoize } from '../../utils' 4 | 5 | 6 | export function createInmemSecretService() { 7 | const providers = new Map() 8 | 9 | async function getSecret(secretType: string) { 10 | const provider = [...providers.values()].find(x => x.secretType === secretType) 11 | if (!provider) { 12 | throw new Error(`No secret provider found: ${secretType}`) 13 | } 14 | 15 | return await provider.getSecret() 16 | } 17 | 18 | function _getBinding(): ServiceProvider { 19 | return { 20 | kind: 'secret-provider', 21 | load: (id, config) => void providers.set(id, config), 22 | unload: (id) => void providers.delete(id), 23 | } 24 | } 25 | 26 | return { 27 | getSecret, 28 | _getBinding, 29 | } 30 | } 31 | 32 | export const getInmemSecretService = memoize(createInmemSecretService) 33 | 34 | // getServiceRegistry().registerServiceProvider( 35 | // getInmemSecretService()._getBinding() 36 | // ) -------------------------------------------------------------------------------- /src/static-solver/index.ts: -------------------------------------------------------------------------------- 1 | export * from './scopes' 2 | export * from './solver' 3 | export * from './compiler' 4 | export { printNodes } from './utils' -------------------------------------------------------------------------------- /src/utils/caches.ts: -------------------------------------------------------------------------------- 1 | import { memoize } from '../utils' 2 | import { Memento } from './memento' 3 | 4 | export interface TtlCache extends Omit { 5 | /** `ttl` is in seconds */ 6 | set(key: string, value: T, ttl: number): Promise 7 | } 8 | 9 | export function createTtlCache(memento: Memento): TtlCache { 10 | const manifestKey = '__ttl-manifest__' 11 | 12 | function _getManifest() { 13 | return memento.get>(manifestKey, {}) 14 | } 15 | 16 | // We're assuming there is only 1 writer per-memento 17 | const getManifest = memoize(_getManifest) 18 | 19 | async function updateManifest(entries: Record) { 20 | const m = await getManifest() 21 | await memento.set(manifestKey, { ...m, ...entries }) 22 | } 23 | 24 | async function putEntry(key: string, ttl: number) { 25 | await updateManifest({ [key]: Date.now() + (ttl * 1000) }) 26 | } 27 | 28 | async function deleteEntry(key: string) { 29 | await updateManifest({ [key]: undefined }) 30 | } 31 | 32 | async function isInvalid(key: string) { 33 | const m = await getManifest() 34 | if (!m[key]) { 35 | return // Maybe always return true here? 
36 | } 37 | 38 | return Date.now() >= m[key] 39 | } 40 | 41 | async function get(key: string, defaultValue?: T): Promise { 42 | if (await isInvalid(key) === true) { 43 | await deleteEntry(key) 44 | await memento.delete(key) 45 | 46 | return defaultValue 47 | } 48 | 49 | return memento.get(key, defaultValue) 50 | } 51 | 52 | async function set(key: string, value: T, ttl: number) { 53 | await putEntry(key, ttl) 54 | await memento.set(key, value) 55 | } 56 | 57 | async function _delete(key: string) { 58 | await deleteEntry(key) 59 | await memento.delete(key) 60 | } 61 | 62 | return { get, set, delete: _delete } 63 | } -------------------------------------------------------------------------------- /src/utils/memento.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { Fs } from '../system' 3 | import { throwIfNotFileNotFoundError } from '../utils' 4 | 5 | // Roughly inspired by VS Code's "Memento" API 6 | 7 | export interface Memento { 8 | get(key: string): Promise 9 | get(key: string, defaultValue: T): Promise 10 | set(key: string, value: T): Promise 11 | delete(key: string): Promise 12 | } 13 | 14 | export function createMemento(fs: Pick, dir: string): Memento { 15 | const getLocation = (key: string) => path.resolve(dir, key) 16 | 17 | async function get(key: string, defaultValue?: T): Promise { 18 | try { 19 | return JSON.parse(await fs.readFile(getLocation(key), 'utf-8')) 20 | } catch (e) { 21 | throwIfNotFileNotFoundError(e) 22 | 23 | // Delete data when JSON is malformed? 24 | return defaultValue 25 | } 26 | } 27 | 28 | async function set(key: string, value: T) { 29 | await fs.writeFile(getLocation(key), JSON.stringify(value)) 30 | } 31 | 32 | async function _delete(key: string) { 33 | await fs.deleteFile(getLocation(key)).catch(throwIfNotFileNotFoundError) 34 | } 35 | 36 | return { get, set, delete: _delete } 37 | } 38 | 39 | export interface TypedMemento { 40 | get(): Promise 41 | set(value: T): Promise 42 | } 43 | 44 | export function createTypedMemento(memento: Memento, key: string): TypedMemento { 45 | return { 46 | get: () => memento.get(key), 47 | set: value => memento.set(key, value), 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/utils/stateless-watcher/binding.ts: -------------------------------------------------------------------------------- 1 | import * as watcher from './watcher' 2 | import { getResolvedTsConfig } from '../../compiler/config' 3 | import { getFs } from '../../execution' 4 | import { throwIfNotFileNotFoundError } from '../../utils' 5 | import { getWatcherStateFilePath, getWorkingDir } from '../../workspaces' 6 | 7 | async function getWatcherSettings(): Promise { 8 | const config = await getResolvedTsConfig() 9 | 10 | return { 11 | extnames: ['ts', 'tsx'], 12 | included_patterns: config?.include, 13 | excluded_patterns: config?.exclude, 14 | excluded_dirnames: ['node_modules'], 15 | } 16 | } 17 | 18 | export async function maybeDetectChanges() { 19 | const watcherState = await getFs().readFile(getWatcherStateFilePath()).catch(throwIfNotFileNotFoundError) 20 | if (!watcherState) { 21 | return 22 | } 23 | 24 | const settings = await getWatcherSettings() 25 | const result = await watcher.detectChanges(watcherState, getWorkingDir(), settings) 26 | if (!result) { 27 | return [] 28 | } 29 | 30 | await getFs().writeFile(getWatcherStateFilePath(), new Uint8Array(result.state)) 31 | 32 | return result.changes 33 | } 34 | 35 | 
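/** Creates the watcher state file on first use; otherwise re-scans the working directory, persists the refreshed state, and returns any changes detected since the last run. */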
export async function initOrUpdateWatcherState() { 36 | const settings = await getWatcherSettings() 37 | const watcherState = await getFs().readFile(getWatcherStateFilePath()).catch(throwIfNotFileNotFoundError) 38 | if (!watcherState) { 39 | const state = await watcher.initState(getWorkingDir(), settings) 40 | if (!state) { 41 | throw new Error('Fatal error initializing state') 42 | } 43 | 44 | await getFs().writeFile(getWatcherStateFilePath(), new Uint8Array(state)) 45 | 46 | return 47 | } 48 | 49 | const result = await watcher.detectChanges(watcherState, getWorkingDir(), settings) 50 | if (result) { 51 | await getFs().writeFile(getWatcherStateFilePath(), new Uint8Array(result.state)) 52 | 53 | return result.changes 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/utils/stateless-watcher/mem.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | 3 | pub fn ConcurrentBumpAllocator(comptime T: type) type { 4 | return struct { 5 | pub const items_per_page = @divFloor(std.mem.page_size, @sizeOf(T)); 6 | const required_bits: u64 = @intFromFloat(@log2(@as(f64, items_per_page))); 7 | const mask = std.math.pow(u64, 2, required_bits) - 1; 8 | 9 | count: std.atomic.Value(u64) = std.atomic.Value(u64).init(0), 10 | pages: std.ArrayList([]T), 11 | page_allocator: std.mem.Allocator, 12 | mutex: std.Thread.Mutex, 13 | 14 | pub fn init(allocator: std.mem.Allocator, page_allocator: std.mem.Allocator) @This() { 15 | return .{ 16 | .pages = std.ArrayList([]T).init(allocator), 17 | .page_allocator = page_allocator, 18 | .mutex = std.Thread.Mutex{}, 19 | }; 20 | } 21 | 22 | pub fn deinit(this: *@This()) void { 23 | for (this.pages.items) |p| { 24 | this.page_allocator.free(p); 25 | } 26 | this.pages.deinit(); 27 | } 28 | 29 | inline fn addPage(this: *@This()) !void { 30 | const page = try this.page_allocator.alloc(T, items_per_page); 31 | 32 | this.mutex.lock(); 33 | defer this.mutex.unlock(); 34 | 35 | try this.pages.append(page); 36 | } 37 | 38 | pub fn push(this: *@This(), value: T) !u64 { 39 | const ret = this.count.fetchAdd(1, .monotonic); 40 | 41 | const local_count = ret % items_per_page; 42 | const page = ret / items_per_page; 43 | if (page == this.pages.items.len) { 44 | try this.addPage(); 45 | } 46 | 47 | this.mutex.lock(); 48 | var p = this.pages.items[page]; 49 | p[local_count] = value; 50 | this.mutex.unlock(); 51 | 52 | return ret; 53 | } 54 | 55 | // Not concurrent safe 56 | pub fn at(this: *const @This(), index: usize) *T { 57 | std.debug.assert(index < this.count.load(.monotonic)); 58 | 59 | const page = index / items_per_page; 60 | const offset = @rem(index, items_per_page); 61 | 62 | return &this.pages.items[page][offset]; 63 | } 64 | }; 65 | } 66 | 67 | -------------------------------------------------------------------------------- /src/zig/fs-ext.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs/promises' 2 | import * as path from 'node:path' 3 | import * as fsExt from './fs-ext.zig' 4 | 5 | function canUseFsExt() { 6 | return process.release.name === 'node-synapse' 7 | } 8 | 9 | export function fastCopyDir(src: string, dst: string) { 10 | if (!canUseFsExt()) { 11 | throw new Error(`"fastCopyDir" is not available in the current runtime`) 12 | } 13 | 14 | const srcDir = path.dirname(src) 15 | const dstDir = path.dirname(dst) 16 | const srcBase = path.basename(src) 17 | const dstBase = 
path.basename(dst) 18 | 19 | return fsExt.cloneDir(srcDir, srcBase, dstDir, dstBase) 20 | } 21 | 22 | // Faster than `fs.rm(b, { force: true, recursive: true })` by ~50% on darwin (untested elsewhere) 23 | export async function removeDir(dir: string) { 24 | const files = await fs.readdir(dir, { withFileTypes: true }) 25 | const p: Promise[] = [] 26 | for (const f of files) { 27 | if (!f.isDirectory()) { 28 | p.push(fs.rm(path.resolve(dir, f.name))) 29 | } else { 30 | if (!canUseFsExt()) { 31 | p.push(fs.rm(path.resolve(dir, f.name), { recursive: true, force: true })) 32 | } else { 33 | p.push(fsExt.removeDir(dir, f.name)) 34 | } 35 | } 36 | } 37 | 38 | await Promise.all(p) 39 | await fs.rmdir(dir) 40 | } 41 | 42 | export async function cleanDir(dir: string, toKeep: string[]) { 43 | const s = new Set(toKeep) 44 | const files = await fs.readdir(dir, { withFileTypes: true }) 45 | const p: Promise[] = [] 46 | for (const f of files) { 47 | if (s.has(f.name)) continue 48 | 49 | if (!f.isDirectory()) { 50 | p.push(fs.rm(path.resolve(dir, f.name))) 51 | } else { 52 | if (!canUseFsExt()) { 53 | p.push(fs.rm(path.resolve(dir, f.name), { recursive: true, force: true })) 54 | } else { 55 | p.push(fsExt.removeDir(dir, f.name)) 56 | } 57 | } 58 | } 59 | 60 | await Promise.all(p) 61 | } 62 | 63 | export async function linkBin(src: string, dst: string) { 64 | await fsExt.symLinkBin(path.resolve(src), path.resolve(dst)) 65 | } -------------------------------------------------------------------------------- /src/zig/lib/mem.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const builtin = @import("builtin"); 3 | const isWasm = builtin.target.isWasm(); 4 | 5 | pub const allocator = if (isWasm) toAllocator(&std.heap.WasmPageAllocator{}) else std.heap.c_allocator; 6 | 7 | pub fn strlen(source: [*:0]const u8) usize { 8 | var i: usize = 0; 9 | while (source[i] != 0) i += 1; 10 | return i; 11 | } 12 | 13 | export fn memcpy(dest: [*]u8, src: [*]const u8, len: usize) [*]u8 { 14 | for (0..len) |i| { 15 | dest[i] = src[i]; 16 | } 17 | return dest; 18 | } 19 | 20 | export fn memset(dest: [*]u8, fill: u8, count: usize) [*]u8 { 21 | for (0..count) |i| { 22 | dest[i] = fill; 23 | } 24 | return dest; 25 | } 26 | 27 | fn toAllocator(a: *const std.heap.WasmPageAllocator) std.mem.Allocator { 28 | return std.mem.Allocator{ 29 | .ptr = @constCast(a), 30 | .vtable = &std.heap.WasmPageAllocator.vtable, 31 | }; 32 | } 33 | -------------------------------------------------------------------------------- /src/zig/util.ts: -------------------------------------------------------------------------------- 1 | import * as util from './util.zig' 2 | 3 | // This will likely crash if called inside an immediate callback 4 | export function waitForPromise(promise: Promise | T): T { 5 | if (process.release.name !== 'node-synapse') { 6 | throw new Error(`"waitForPromise" is not available in the current runtime`) 7 | } 8 | 9 | if (promise instanceof Promise) { 10 | return util.waitForPromise(promise) 11 | } 12 | 13 | if (!!promise && typeof promise === 'object' && 'then' in promise) { 14 | return util.waitForPromise(promise) 15 | } 16 | 17 | return promise 18 | } -------------------------------------------------------------------------------- /src/zig/util.zig: -------------------------------------------------------------------------------- 1 | const js = @import("js"); 2 | 3 | pub fn waitForPromise(p: *js.Value) *js.Value { 4 | return js.waitForPromise(p) catch 
unreachable; 5 | } 6 | 7 | comptime { 8 | js.registerModule(@This()); 9 | } 10 | 11 | -------------------------------------------------------------------------------- /src/zig/win32/load-hook.zig: -------------------------------------------------------------------------------- 1 | // ref: https://github.com/nodejs/node-gyp/blob/af876e10f01ea8e3fdfeee20dbee3f7138ccffd5/src/win_delay_load_hook.cc 2 | // This file won't be useful until Zig has an equivalent to the MSVC `/delayload` switch 3 | 4 | const std = @import("std"); 5 | const DWORD = std.os.windows.DWORD; 6 | const FARPROC = std.os.windows.FARPROC; 7 | const HMODULE = std.os.windows.HMODULE; 8 | const LPCSTR = std.os.windows.LPCSTR; 9 | const WINAPI = std.os.windows.WINAPI; 10 | const eql = std.mem.eql; 11 | 12 | 13 | const Event = enum(c_uint) { 14 | dliStartProcessing, // used to bypass or note helper only 15 | dliNotePreLoadLibrary, // called just before LoadLibrary, can 16 | // override w/ new HMODULE return val 17 | dliNotePreGetProcAddress, // called just before GetProcAddress, can 18 | // override w/ new FARPROC return value 19 | dliFailLoadLib, // failed to load library, fix it by 20 | // returning a valid HMODULE 21 | dliFailGetProc, // failed to get proc address, fix it by 22 | // returning a valid FARPROC 23 | dliNoteEndProcessing, // called after all processing is done, no 24 | // bypass possible at this point except 25 | // by longjmp()/throw()/RaiseException. 26 | }; 27 | 28 | 29 | 30 | const ImgDelayDescr = opaque {}; 31 | const DelayLoadProc = struct { 32 | fImportByName: bool, 33 | data: union { 34 | szProcName: LPCSTR, 35 | dwOrdinal: DWORD, 36 | }, 37 | }; 38 | 39 | const DelayLoadInfo = struct { 40 | cb: DWORD, // size of structure 41 | pidd: *ImgDelayDescr, // raw form of data (everything is there) 42 | ppfn: *FARPROC, // points to address of function to load 43 | szDll: LPCSTR, // name of dll 44 | dlp: DelayLoadProc, // name or ordinal of procedure 45 | hmodCur: HMODULE, // the hInstance of the library we have loaded 46 | pfnCur: FARPROC, // the actual function that will be called 47 | dwLastError: DWORD, // error received (if an error notification) 48 | }; 49 | 50 | 51 | extern "kernel32" fn GetModuleHandleA(lpModuleName: ?LPCSTR) callconv(WINAPI) ?HMODULE; 52 | 53 | export fn __pfnDliNotifyHook2(dliNotify: Event, pdli: *DelayLoadInfo) callconv(WINAPI) ?FARPROC { 54 | if (dliNotify != .dliNotePreLoadLibrary) { 55 | return null; 56 | } 57 | 58 | std.debug.print("{s}\n", .{pdli.szDll}); 59 | 60 | if (!eql(u8, std.mem.sliceTo(pdli.szDll, 0), "node.exe")) { 61 | return null; 62 | } 63 | 64 | const h = GetModuleHandleA(null); 65 | 66 | return @ptrCast(h); 67 | } 68 | -------------------------------------------------------------------------------- /test/conformance/bucket.ts: -------------------------------------------------------------------------------- 1 | import * as synapse from 'synapse:core' 2 | import { Bucket } from 'synapse:srl/storage' 3 | import { describe, it, test, expect, expectEqual } from 'synapse:test' 4 | 5 | describe('Bucket', () => { 6 | const b = new Bucket() 7 | 8 | it('returns `undefined` if the key is absent', async () => { 9 | const data = await b.get('missing-key') 10 | expectEqual(data, undefined) 11 | }) 12 | 13 | it('returns a blob by default', async () => { 14 | const data = 'hi' 15 | await b.put('my-key', data) 16 | const blob = await b.get('my-key') 17 | expect(blob instanceof Blob) 18 | }) 19 | 20 | it('stores', async () => { 21 | const data = 'hi' 22 | await b.put('my-key', data) 23 
| expectEqual(await b.get('my-key', 'utf-8'), data) 24 | }) 25 | 26 | it('stores streams', async () => { 27 | const data = 'hi' 28 | const blob = new Blob([data]) 29 | await b.put('my-key', blob.stream()) 30 | expectEqual(await b.get('my-key', 'utf-8'), data) 31 | }) 32 | 33 | it('deletes', async () => { 34 | await b.put('delete-me', 'dummy') 35 | await b.delete('delete-me') 36 | const data = await b.get('delete-me') 37 | expectEqual(data, undefined) 38 | }) 39 | 40 | it('stores (nested)', async () => { 41 | const data = 'im-nested' 42 | await b.put('my/nested/key', data) 43 | expectEqual(await b.get('my/nested/key', 'utf-8'), data) 44 | }) 45 | 46 | describe('Bucket (Fresh)', () => { 47 | const b = new Bucket() 48 | 49 | it('lists (nested)', async () => { 50 | const data = 'im-nested' 51 | await b.put('list/key', data) 52 | await b.put('list/nested/key1', data) 53 | await b.put('list/nested/key2', data) 54 | 55 | const list = await b.list() 56 | expectEqual(list, ['list/key', 'list/nested/key1', 'list/nested/key2']) 57 | }) 58 | }) 59 | 60 | it('lists with prefix (nested)', async () => { 61 | const data = 'im-nested' 62 | await b.put('list/key', data) 63 | await b.put('list/nested/key1', data) 64 | await b.put('list/nested/key2', data) 65 | 66 | const list = await b.list('list/nested') 67 | expectEqual(list, ['list/nested/key1', 'list/nested/key2']) 68 | }) 69 | 70 | describe('addBlob', () => { 71 | const dest = b.addBlob(require('node:path').resolve(__filename)) 72 | 73 | it('adds a blob', async () => { 74 | expect(await b.get(dest)) 75 | }) 76 | 77 | const assetKey = b.addBlob(synapse.asset('table.ts')) 78 | it('works with assets', async () => { 79 | expect(await b.get(assetKey)) 80 | }) 81 | }) 82 | 83 | describe('stat', () => { 84 | it('returns size', async () => { 85 | const data = Buffer.from('foobar') 86 | await b.put('my-key', data) 87 | const stats = await b.stat('my-key') 88 | expectEqual(stats?.size, data.byteLength) 89 | }) 90 | 91 | it('returns `undefined` for missing items', async () => { 92 | expectEqual(await b.stat('i-do-not-exist'), undefined) 93 | }) 94 | }) 95 | }) 96 | -------------------------------------------------------------------------------- /test/conformance/queue.ts: -------------------------------------------------------------------------------- 1 | import { Queue, Bucket } from 'synapse:srl/storage' 2 | import { describe, it, expectEqual } from 'synapse:test' 3 | import { waitUntil } from './util' 4 | 5 | describe('Queue', () => { 6 | const bucket = new Bucket() 7 | const queue = new Queue<[string, string]>() 8 | 9 | queue.on('message', async ([key, value]) => { 10 | await bucket.put(key, value) 11 | }) 12 | 13 | it('uses listeners', async () => { 14 | await queue.send(['foo', 'bar']) 15 | const actual = await waitUntil(100, 2500, () => bucket.get('foo', 'utf-8')) 16 | expectEqual(actual, 'bar') 17 | await bucket.delete('foo') 18 | }) 19 | }) 20 | -------------------------------------------------------------------------------- /test/conformance/table.ts: -------------------------------------------------------------------------------- 1 | import { Table } from 'synapse:srl/storage' 2 | import { describe, it, expectEqual } from 'synapse:test' 3 | 4 | describe('Table', () => { 5 | const table = new Table() 6 | 7 | it('can set and get items', async () => { 8 | const key = 'foo' 9 | const timestamp = new Date().toISOString() 10 | await table.set(key, timestamp) 11 | expectEqual(await table.get(key), timestamp) 12 | }) 13 | 14 | it('can delete items', async () => 
{ 15 | const key = 'delete-me' 16 | await table.set(key, key) 17 | expectEqual(await table.get(key), key) 18 | await table.delete(key) 19 | expectEqual(await table.get(key), undefined) 20 | }) 21 | }) 22 | -------------------------------------------------------------------------------- /test/conformance/util.ts: -------------------------------------------------------------------------------- 1 | // AWS Lambda will report "done" if the event loop is empty and the microtask queue isn't being emptied 2 | const shouldUnref = process.env.SYNAPSE_TARGET === 'local' 3 | export function sleep(ms: number, unref = shouldUnref) { 4 | return new Promise(r => { 5 | const timer = setTimeout(r, ms) 6 | unref && timer.unref() 7 | }) 8 | } 9 | 10 | export async function waitUntil(delay: number, timeout: number, cb: () => Promise | T | undefined): Promise { 11 | const start = Date.now() 12 | while (Date.now() - start < timeout) { 13 | const actual = await cb() 14 | if (actual !== undefined) { 15 | return actual 16 | } 17 | 18 | await sleep(delay) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /test/fixtures/branches/isolation/main.ts: -------------------------------------------------------------------------------- 1 | import { expectEqual } from 'synapse:test' 2 | 3 | export function main(status: string) { 4 | const lines = status.split('\n') 5 | expectEqual(lines[0], 'No packages installed') 6 | } 7 | 8 | // !commands 9 | // echo "skipped"; exit 0 10 | // synapse clean 11 | // export CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) 12 | // synapse compile 13 | // git checkout -b test-branch-isolation 14 | // # TODO: this test will eventually fail once smarter caching is enabled 15 | // export STATUS=$(synapse status) 16 | // synapse compile 17 | // synapse run -- "$STATUS" 18 | // 19 | // @finally synapse clean 20 | // @finally if [ -n "$CURRENT_BRANCH" ]; then git checkout "$CURRENT_BRANCH"; fi 21 | // @finally git branch -d test-branch-isolation || true 22 | -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file-nested/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | const b = new Bucket() 4 | 5 | // !commands 6 | // # It's important that the `nested` dir exists before we run the test 7 | // 8 | // synapse deploy main.ts 9 | // 10 | // cp main.ts nested/dir/main2.ts 11 | // 12 | // synapse deploy nested/dir/main2.ts 13 | // synapse show main.ts#b 14 | // synapse show nested/dir/main2.ts#b 15 | // 16 | // @finally rm nested/dir/main2.ts -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file-nested2/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | // const b = new Bucket() 4 | 5 | // !commands 6 | // synapse compile 7 | // 8 | // cp main.ts nested/dir/main2.ts 9 | // @toggleComment nested/dir/main2.ts 3 10 | // 11 | // synapse deploy nested/dir/main2.ts 12 | // synapse show nested/dir/main2.ts#b 13 | // 14 | // @finally rm nested/dir/main2.ts -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file-nested3/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | // const b = new Bucket() 4 | 5 | // 
!commands 6 | // synapse compile 7 | // 8 | // cp main.ts nested/dir/main2.ts 9 | // synapse deploy @expectFail 10 | // 11 | // @toggleComment nested/dir/main2.ts 3 12 | // 13 | // synapse deploy 14 | // synapse show nested/dir/main2.ts#b 15 | // 16 | // @finally rm nested/dir/main2.ts -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | const b = new Bucket() 4 | 5 | // !commands 6 | // synapse deploy main.ts 7 | // cp main.ts main2.ts 8 | // synapse deploy main2.ts 9 | // synapse show main.ts#b 10 | // synapse show main2.ts#b 11 | // 12 | // @finally rm main2.ts -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file2/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | // const b = new Bucket() 4 | 5 | // !commands 6 | // synapse compile 7 | // synapse deploy @expectFail 8 | // 9 | // cp main.ts main2.ts 10 | // @toggleComment main2.ts 3 11 | // synapse deploy @expectFail 12 | // 13 | // @toggleComment 3 14 | // synapse deploy 15 | // 16 | // mkdir -p foo 17 | // cp main.ts foo/main3.ts 18 | // synapse deploy 19 | // synapse show foo/main3.ts#b 20 | // 21 | // 22 | // @finally rm main2.ts 23 | // @finally rm -rf foo -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file2/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["main.ts", "foo"] 3 | } -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file3/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | // const b = new Bucket() 4 | 5 | // !commands 6 | // synapse compile 7 | // synapse deploy @expectFail 8 | // 9 | // mkdir -p foo 10 | // cp main.ts foo/bar.ts 11 | // @toggleComment foo/bar.ts 3 12 | // synapse deploy @expectFail 13 | // 14 | // cp foo/bar.ts foo/bbar.ts # Wildcard matching 15 | // synapse deploy @expectFail 16 | // 17 | // mkdir -p foo/foo # Nested matching 18 | // cp foo/bar.ts foo/foo/bar.ts 19 | // synapse deploy @expectFail 20 | // 21 | // mkdir -p foo/bar/foo # Nested globstar matching 22 | // cp foo/bar.ts foo/bar/foo/bar.ts 23 | // synapse deploy @expectFail 24 | // 25 | // cp foo/bar.ts foo/r.ts # Doesn't match exclude pattern 26 | // synapse deploy 27 | // synapse show foo/r.ts#b 28 | // 29 | // 30 | // @finally rm -rf foo -------------------------------------------------------------------------------- /test/fixtures/deploy/added-file3/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["main.ts", "foo"], 3 | "exclude": ["**/foo/*ar.ts"] 4 | } -------------------------------------------------------------------------------- /test/fixtures/deploy/removed-file/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | const b = new Bucket() 4 | 5 | // !commands 6 | // cp main.ts main2.ts 7 | // synapse deploy --symbol main2.ts#b --dry-run 8 | // 9 | // rm main2.ts 10 | // export OUTPUT=$(synapse deploy --dry-run | grep -i '^ ') 11 | // 
@expectEqual "$OUTPUT" " + b main.ts:3:15" 12 | // 13 | // 14 | // @finally rm -f main2.ts -------------------------------------------------------------------------------- /test/fixtures/deploy/transitive-side-effect/b.ts: -------------------------------------------------------------------------------- 1 | import { defineResource } from 'synapse:core' 2 | import { Bucket } from 'synapse:srl/storage' 3 | 4 | export const b = new Bucket() 5 | 6 | class Obj extends defineResource({ 7 | create: async () => { 8 | await b.put('foo', 'bar') 9 | }, 10 | }) {} 11 | 12 | new Obj() 13 | -------------------------------------------------------------------------------- /test/fixtures/deploy/transitive-side-effect/main.ts: -------------------------------------------------------------------------------- 1 | import { b } from './b' 2 | import { expectEqual } from 'synapse:test' 3 | 4 | export async function main() { 5 | expectEqual(await b.get('foo', 'utf-8'), 'bar') 6 | } 7 | 8 | // !commands 9 | // synapse run @input Y -------------------------------------------------------------------------------- /test/fixtures/deploy/transitive-side-effect2/b.ts: -------------------------------------------------------------------------------- 1 | import { defineResource } from 'synapse:core' 2 | import { Bucket } from 'synapse:srl/storage' 3 | 4 | export const b = new Bucket() 5 | 6 | class Obj extends defineResource({ 7 | create: async () => { 8 | await b.put('foo', 'bar') 9 | }, 10 | }) {} 11 | 12 | new Obj() 13 | -------------------------------------------------------------------------------- /test/fixtures/deploy/transitive-side-effect2/main.ts: -------------------------------------------------------------------------------- 1 | import { b } from './b' 2 | import { expectEqual } from 'synapse:test' 3 | import { Bucket } from 'synapse:srl/storage' 4 | 5 | // Force the current file to be "deployable" 6 | const b2 = new Bucket() 7 | 8 | export async function main() { 9 | await b2.get('a') 10 | expectEqual(await b.get('foo', 'utf-8'), 'bar') 11 | } 12 | 13 | // !commands 14 | // synapse run @input Y -------------------------------------------------------------------------------- /test/fixtures/env/main.ts: -------------------------------------------------------------------------------- 1 | import { defineDataSource } from 'synapse:core' 2 | import { test, expectEqual } from 'synapse:test' 3 | 4 | const expected = process.env.SYNAPSE_ENV === 'test' ? 
'bar' : 'foo' 5 | 6 | const foo = process.env.foo 7 | test('loads env file for synthesis', () => { 8 | expectEqual(foo, expected) 9 | }) 10 | 11 | test('loads env file for tests', () => { 12 | expectEqual(process.env.foo, expected) 13 | }) 14 | 15 | { 16 | const getFoo = defineDataSource(() => process.env.foo) 17 | const foo = getFoo() 18 | test('loads env file for deploy', () => { 19 | expectEqual(foo, expected) 20 | }) 21 | } 22 | 23 | export function main(output: string, expectedFoo: string) { 24 | const lines = output.split('\n') 25 | expectEqual(lines.at(-1), 'Skipped 3 unchanged tests') 26 | expectEqual(expected, expectedFoo) 27 | } 28 | 29 | // !commands 30 | // echo "foo=foo" > .env 31 | // synapse test 32 | // echo "foo=bar" > .env.test 33 | // export SYNAPSE_ENV=test 34 | // synapse test 35 | // 36 | // # Unrelated env var changes should not re-synth 37 | // synapse deploy # XXX: needed because the module itself is a resource 38 | // export BAR=bar 39 | // export OUTPUT=$(synapse test) 40 | // synapse run -- "$OUTPUT" bar 41 | // 42 | // # Env vars should trigger a recompilation on deploy 43 | // unset SYNAPSE_ENV 44 | // synapse deploy 45 | // synapse run -- "$OUTPUT" foo 46 | // 47 | // @finally rm -f .env 48 | // @finally rm -f .env.test 49 | -------------------------------------------------------------------------------- /test/fixtures/pm/pkg-archive/main.ts: -------------------------------------------------------------------------------- 1 | import { Client } from 'pkg-b' 2 | 3 | export async function main() { 4 | const c = new Client({ authorization: () => 'authz' }) 5 | console.log(await c.bar()) 6 | } 7 | 8 | // !commands 9 | // (cd pkg-b && synapse deploy && synapse publish --archive out/pkg.tgz) 10 | // synapse add pkg-b/out/pkg.tgz 11 | // synapse run 12 | -------------------------------------------------------------------------------- /test/fixtures/pm/pkg-archive/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "pkg-b": "file:pkg-b/out/pkg.tgz" 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/pm/pkg-archive/pkg-b/main.ts: -------------------------------------------------------------------------------- 1 | import { Service } from 'synapse:services' 2 | 3 | class Foo extends Service { 4 | public bar() { 5 | return { bar: 'bar' } 6 | } 7 | } 8 | 9 | const foo = new Foo() 10 | foo.addAuthorizer(() => {}) 11 | 12 | export const Client = foo.createClientClass() 13 | 14 | -------------------------------------------------------------------------------- /test/fixtures/pm/pkg-archive/pkg-b/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pkg-b", 3 | "exports": "./main.ts", 4 | "tsconfig": { 5 | "compilerOptions": { 6 | "declaration": true 7 | } 8 | } 9 | } -------------------------------------------------------------------------------- /test/fixtures/pm/provider-version/main.ts: -------------------------------------------------------------------------------- 1 | import * as aws from 'synapse-provider:aws' 2 | import { test, expectEqual } from 'synapse:test' 3 | 4 | test('provider version matches', () => { 5 | expectEqual(aws.version, '5.53.0') 6 | }) 7 | 8 | // !commands 9 | // synapse test 10 | -------------------------------------------------------------------------------- /test/fixtures/pm/provider-version/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "synapse": { 3 | "providers": { 4 | "aws": "5.53.0" 5 | } 6 | } 7 | } -------------------------------------------------------------------------------- /test/fixtures/pm/unofficial-provider/main.ts: -------------------------------------------------------------------------------- 1 | import * as alicloud from 'synapse-provider:alicloud' 2 | 3 | const disk = new alicloud.Disk() 4 | 5 | // !commands 6 | // synapse compile --target local 7 | -------------------------------------------------------------------------------- /test/fixtures/pm/unofficial-provider/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "synapse": { 3 | "providers": { 4 | "aliyun/alicloud": "*" 5 | } 6 | } 7 | } -------------------------------------------------------------------------------- /test/fixtures/run/added-file-nested/main.ts: -------------------------------------------------------------------------------- 1 | export function main() {} 2 | 3 | // !commands 4 | // # It's important that the `nested` dir exists before we run the test 5 | // 6 | // synapse run main.ts 7 | // 8 | // cp main.ts nested/dir/main2.ts 9 | // synapse run nested/dir/main2.ts 10 | // 11 | // @finally rm nested/dir/main2.ts -------------------------------------------------------------------------------- /test/fixtures/run/added-file/main.ts: -------------------------------------------------------------------------------- 1 | export function main() {} 2 | 3 | // !commands 4 | // synapse compile main.ts 5 | // synapse run main.ts 6 | // cp main.ts main2.ts 7 | // synapse run main2.ts 8 | // 9 | // @finally rm main2.ts -------------------------------------------------------------------------------- /test/fixtures/run/auto-install/main.ts: -------------------------------------------------------------------------------- 1 | import ts from 'typescript' 2 | 3 | export async function main() { 4 | console.log(ts.version) 5 | } 6 | 7 | // !commands 8 | // synapse clean 9 | // synapse run 10 | 11 | -------------------------------------------------------------------------------- /test/fixtures/run/auto-install/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "typescript": "5.4.5" 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/run/main-fn/main.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | import { expectEqual } from 'synapse:test' 3 | 4 | const fn = new Function(() => 'Hello, world!') 5 | 6 | export async function main() { 7 | expectEqual(await fn(), 'Hello, world!') 8 | } 9 | 10 | // !commands 11 | // synapse deploy 12 | // synapse run 13 | -------------------------------------------------------------------------------- /test/fixtures/run/prompt-deploy/main.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | 3 | const message = process.env.TEST_MESSAGE 4 | 5 | const hello = new Function(() => message) 6 | 7 | export async function main(...args: string[]) { 8 | console.log(await hello()) 9 | } 10 | 11 | // !commands 12 | // export TEST_MESSAGE=hello 13 | // synapse run @input Y 14 | // @expectEqual "$(synapse run)" hello 15 | // 16 | // # env var change detection only looks at `*.env` files 17 | // 
# Though I think we should do more than that, while making 18 | // # it clear that the env vars are implicit 19 | // exit 0 20 | // 21 | // export TEST_MESSAGE=goodbye 22 | // synapse run @input N 23 | // @expectEqual "$(synapse run)" hello 24 | // 25 | // synapse compile # Force recompilation 26 | // synapse run @input Y 27 | // @expectEqual "$(synapse run)" goodbye 28 | -------------------------------------------------------------------------------- /test/fixtures/run/two-mains/main.ts: -------------------------------------------------------------------------------- 1 | // !commands 2 | // synapse run one.ts -- one 3 | // synapse run two.ts -- two 4 | -------------------------------------------------------------------------------- /test/fixtures/run/two-mains/one.ts: -------------------------------------------------------------------------------- 1 | import { expectEqual } from 'synapse:test' 2 | 3 | export function main(arg: string) { 4 | expectEqual(arg, 'one') 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/run/two-mains/two.ts: -------------------------------------------------------------------------------- 1 | import { expectEqual } from 'synapse:test' 2 | 3 | export function main(arg: string) { 4 | expectEqual(arg, 'two') 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/run/variations/folder/hello.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | 3 | const hello = new Function(() => { 4 | return { message: 'hello, world?' } 5 | }) 6 | 7 | export async function main(...args: string[]) { 8 | console.log(await hello()) 9 | } 10 | -------------------------------------------------------------------------------- /test/fixtures/run/variations/main.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | 3 | const hello = new Function(() => { 4 | return { message: 'hello, world!' } 5 | }) 6 | 7 | export async function main(...args: string[]) { 8 | console.log(await hello()) 9 | } 10 | 11 | // !commands 12 | // synapse deploy ./main.ts 13 | // synapse deploy main.ts 14 | // @expectEqual "$(synapse run ./main.ts)" "{ message: 'hello, world!' }" 15 | // 16 | // synapse deploy ./folder/hello.ts 17 | // synapse deploy folder/hello.ts 18 | // @expectEqual "$(synapse run ./folder/hello.ts)" "{ message: 'hello, world?' 
}" -------------------------------------------------------------------------------- /test/fixtures/source-maps/compile-time.ts: -------------------------------------------------------------------------------- 1 | import { Bundle } from 'synapse:lib' 2 | import { symEval, bindFunctionModel } from 'synapse:core' 3 | 4 | // .infra.js mappings 5 | function infraMappings() { 6 | function bar() { 7 | throw new Error('bar') 8 | } 9 | 10 | bar() 11 | 12 | } 13 | 14 | function permissions() { 15 | function foo() {} 16 | 17 | bindFunctionModel(foo, () => { 18 | throw new Error('Hello!') 19 | }) 20 | 21 | symEval(foo) 22 | } 23 | 24 | function capturing() { 25 | const anonSymbol = Symbol() // TODO: eventually this won't fail, use something else 26 | 27 | function f1() { 28 | console.log(anonSymbol) 29 | } 30 | 31 | function f2() { 32 | f1() 33 | } 34 | 35 | new Bundle(() => f2()) 36 | } 37 | -------------------------------------------------------------------------------- /test/fixtures/source-maps/deploy-time.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import * as core from 'synapse:core' 3 | import { describe, it, expect, expectEqual } from 'synapse:test' 4 | 5 | class MyError extends core.defineResource({ 6 | create: async (key: string) => { 7 | const stack = new Error(key).stack 8 | 9 | return { stack } 10 | }, 11 | }) {} 12 | 13 | const err = new MyError('foo') 14 | 15 | describe('source maps', () => { 16 | it('shows the correct location', () => { 17 | const stack = err.stack 18 | expect(stack) 19 | const firstLine = stack.split('\n')[1] 20 | expect(firstLine, 'Missing first line of stack trace') 21 | const location = firstLine.trim().split(' ')[1] 22 | expect(location, `Missing file location in trace: ${firstLine}`) 23 | 24 | const base = location.split(path.sep).pop()! 
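        // `new Error(key)` inside `create` above sits at line 7, column 23 of this
        // file, so a correctly source-mapped stack should resolve the first frame
        // back to 'deploy-time.ts:7:23' rather than to the bundled output.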
25 | expectEqual(base, 'deploy-time.ts:7:23') 26 | }) 27 | }) -------------------------------------------------------------------------------- /test/fixtures/source-maps/main.ts: -------------------------------------------------------------------------------- 1 | 2 | // !commands 3 | // synapse test -------------------------------------------------------------------------------- /test/fixtures/status/opt/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | const b = new Bucket() 4 | 5 | // !commands 6 | // OUTPUT=$(synapse status) 7 | // @expectMatch "$OUTPUT" "Not compiled" 8 | // 9 | // synapse compile 10 | // OUTPUT=$(synapse status) 11 | // @expectMatch "$OUTPUT" "target: local" 12 | // 13 | // # Environment name + different target 14 | // export SYNAPSE_ENV=test 15 | // synapse clean 16 | // synapse compile --target aws 17 | // OUTPUT=$(synapse status) 18 | // @expectMatch "$OUTPUT" "target: aws, env: test" 19 | -------------------------------------------------------------------------------- /test/fixtures/synth/assets/main.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs' 2 | import * as path from 'node:path' 3 | import * as lib from 'synapse:lib' 4 | import { defineResource, asset, runCommand } from 'synapse:core' 5 | import { describe, it, expectEqual } from 'synapse:test' 6 | import { Function } from 'synapse:srl/compute' 7 | 8 | class MyData extends defineResource({ 9 | create: async (asset: string) => { 10 | const data = await lib.readAsset(asset) 11 | 12 | return { data } 13 | }, 14 | }) {} 15 | 16 | const myAsset = asset('./my-data.json') 17 | const myData = new MyData(myAsset) 18 | 19 | async function hello() { 20 | return JSON.parse(myData.data) 21 | } 22 | 23 | const fn = new Function(hello) 24 | 25 | describe('image function', () => { 26 | it('returns the asset data', async () => { 27 | const resp = await fn() 28 | expectEqual(resp.hello, 'world') 29 | }) 30 | }) 31 | 32 | 33 | async function readMyAsset() { 34 | const data = await lib.readAsset(myAsset) 35 | 36 | return JSON.parse(data) 37 | } 38 | 39 | const b = new lib.Bundle({ readMyAsset, __esModule: true }, { includeAssets: true }) 40 | 41 | const a = new lib.Archive(b) 42 | 43 | declare var __buildTarget: { deploymentId: string; buildDir: string } 44 | function getDeployBuildDir() { 45 | return path.resolve(__buildTarget.buildDir, 'deployments', __buildTarget.deploymentId) 46 | } 47 | 48 | const deployBuildDir = getDeployBuildDir() 49 | 50 | // TODO: the logic to support this is commented out in `synapse:lib` 51 | // `extraFiles` needs to be passed to the backend. 
We don't because 52 | // terraform complains about things being missing in the state 53 | 54 | // it('zips assets', async () => { 55 | // const tmpDir = path.resolve('dist', 'tmp') 56 | // await fs.promises.mkdir(tmpDir, { recursive: true }) 57 | 58 | // const archivePath = path.resolve(deployBuildDir, a.filePath) 59 | 60 | // // Doesn't work with gnu tar 61 | // await runCommand('tar', ['-xzf', archivePath, '-C', tmpDir]) 62 | // //await runCommand('unzip', ['-o', archivePath, '-d', tmpDir]) 63 | 64 | // const h = await import(path.resolve(tmpDir, 'handler.cjs')) 65 | // const resp = await h.readMyAsset() 66 | // expectEqual(resp.hello, 'world') 67 | // }) 68 | 69 | // !commands 70 | // synapse deploy 71 | // synapse test 72 | -------------------------------------------------------------------------------- /test/fixtures/synth/assets/my-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "hello": "world" 3 | } -------------------------------------------------------------------------------- /test/fixtures/synth/cache-indirect/main.ts: -------------------------------------------------------------------------------- 1 | import { HttpService } from 'synapse:srl/compute' 2 | import { fetch } from 'synapse:http' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | const service = new HttpService() 6 | 7 | const goodData = 1 8 | const badData = 0 9 | const data = process.env.USE_GOOD_DATA !== '0' ? goodData : badData 10 | 11 | const getData = service.route('GET', '/data', () => data) 12 | 13 | test('returns good data', async () => { 14 | const resp = await fetch(getData) 15 | expectEqual(resp, goodData) 16 | }) 17 | 18 | // !commands 19 | // # First make sure the test passes normally 20 | // export USE_GOOD_DATA=1 21 | // synapse deploy 22 | // synapse test 23 | // 24 | // # Now we build with bad data 25 | // export USE_GOOD_DATA=0 26 | // synapse compile 27 | // synapse deploy 28 | // synapse test @expectFail 29 | // 30 | // # Check that tests pass again after rebuild 31 | // export USE_GOOD_DATA=1 32 | // synapse compile 33 | // synapse deploy 34 | // synapse test 35 | // 36 | // @finally synapse destroy 37 | // @finally synapse clean 38 | -------------------------------------------------------------------------------- /test/fixtures/synth/capture/main.ts: -------------------------------------------------------------------------------- 1 | import { it, expectEqual } from 'synapse:test' 2 | 3 | function bar() { 4 | return 'bar' 5 | } 6 | 7 | function foo() { 8 | return bar() 9 | } 10 | 11 | class Bar { 12 | public readonly x = foo() 13 | } 14 | 15 | it('captures field initializers', () => { 16 | expectEqual(new Bar().x, 'bar') 17 | }) 18 | 19 | class Foo { 20 | foo() { 21 | return 'foo' 22 | } 23 | } 24 | 25 | function doStuff({ foo = new Foo() } = {}) { 26 | return foo.foo() 27 | } 28 | 29 | 30 | it('captures initializers in object binding patterns', () => { 31 | expectEqual(doStuff(), 'foo') 32 | }) 33 | 34 | function doStuff2(foo = new Foo()) { 35 | return foo.foo() 36 | } 37 | 38 | it('captures parameter initializers', () => { 39 | expectEqual(doStuff2(), 'foo') 40 | }) 41 | 42 | function doStuff3([foo = new Foo()] = []) { 43 | return foo.foo() 44 | } 45 | 46 | it('captures array pattern initializers', () => { 47 | expectEqual(doStuff3(), 'foo') 48 | }) 49 | 50 | class FooFoo { 51 | public foo: string 52 | constructor(foo = new Foo()) { 53 | this.foo = foo.foo() 54 | } 55 | } 56 | 57 | it('captures constructor parameter 
initializers', () => { 58 | expectEqual(new FooFoo().foo, 'foo') 59 | }) 60 | 61 | // !commands 62 | // synapse compile 63 | // synapse test 64 | -------------------------------------------------------------------------------- /test/fixtures/synth/classes/main.ts: -------------------------------------------------------------------------------- 1 | import { test, expectEqual } from 'synapse:test' 2 | 3 | class A { 4 | foo() { return 'foo' } 5 | } 6 | 7 | test('constructor', () => { 8 | expectEqual(new A().foo(), 'foo') 9 | }) 10 | 11 | const B = class { 12 | foo() { return 'foo' } 13 | } 14 | 15 | test('anonymous class', () => { 16 | expectEqual(new B().foo(), 'foo') 17 | }) 18 | 19 | const foo = new B().foo 20 | test('anonymous class (method)', () => { 21 | expectEqual(foo(), 'foo') 22 | }) 23 | 24 | const getValSym = Symbol.for('getVal') 25 | 26 | class C { 27 | constructor(private readonly val: string) {} 28 | 29 | getVal() { 30 | return this.val 31 | } 32 | 33 | [getValSym]() { 34 | return this.val 35 | } 36 | } 37 | 38 | const c = new C('bar') 39 | const getVal = c.getVal.bind(c) 40 | 41 | test('bound methods', () => { 42 | expectEqual(getVal(), 'bar') 43 | }) 44 | 45 | const getVal2 = c.getVal 46 | test('methods (no bind)', () => { 47 | expectEqual(getVal2.call({ val: 'bar2' }), 'bar2') 48 | }) 49 | 50 | const getVal3 = c[getValSym] 51 | test('methods (computed name)', () => { 52 | expectEqual(getVal3.call({ val: 'bar3' }), 'bar3') 53 | }) 54 | 55 | { 56 | const suffix = '!' 57 | 58 | class D { 59 | constructor(public readonly val: string) {} 60 | 61 | getVal() { 62 | return `${this.val}${suffix}` 63 | } 64 | } 65 | 66 | const d = new D('bar4') 67 | const getVal4 = d.getVal 68 | test('methods (captured)', () => { 69 | expectEqual(getVal4.call({ val: 'bar4' }), 'bar4!') 70 | }) 71 | 72 | class E { 73 | constructor(public readonly val: string) {} 74 | 75 | getVal(count: number) { 76 | return `${this.val}${suffix}`.repeat(count) 77 | } 78 | } 79 | 80 | const e = new E('bar5') 81 | const getVal5 = e.getVal 82 | test('methods (captured with args)', () => { 83 | expectEqual(getVal5.call({ val: 'bar5' }, 2), 'bar5!bar5!') 84 | }) 85 | } 86 | 87 | { 88 | const y = 2 89 | class F { 90 | static readonly x = 1 91 | static foo() { 92 | return this.x + y 93 | } 94 | } 95 | 96 | test('static methods', () => { 97 | expectEqual(F.foo(), 3) 98 | }) 99 | } 100 | 101 | { 102 | // Individuals method should fail to serialize but we should 103 | // still be able to use the class itself 104 | class X { 105 | #foo = 1 106 | 107 | static m() { 108 | const x = new this() 109 | x.#foo = 2 110 | 111 | return x 112 | } 113 | 114 | m() { 115 | return this.#foo 116 | } 117 | 118 | m2() { 119 | return this.#foo.toString() 120 | } 121 | 122 | #privateMethod() { 123 | return this.#foo + 1 124 | } 125 | 126 | plusOne() { 127 | return this.#privateMethod() 128 | } 129 | } 130 | 131 | const x = new X() 132 | test('private members', () => { 133 | expectEqual(x.m(), 1) 134 | }) 135 | 136 | test('private members (nested)', () => { 137 | expectEqual(x.m2(), '1') 138 | }) 139 | 140 | test('private method', () => { 141 | expectEqual(x.plusOne(), 2) 142 | }) 143 | } 144 | 145 | { 146 | class X { 147 | foo() { 148 | return 'a' 149 | } 150 | } 151 | 152 | // We can't serialize `foo` yet but we should be able to compile the class 153 | class Y extends X { 154 | foo() { 155 | return super.foo() + 'b' 156 | } 157 | } 158 | 159 | const y = new Y() 160 | test('super prop access', () => { 161 | expectEqual(y.foo(), 'ab') 162 | }) 163 | 
} 164 | 165 | // !commands 166 | // synapse test -------------------------------------------------------------------------------- /test/fixtures/synth/conditional-test/main.ts: -------------------------------------------------------------------------------- 1 | import { test, expectEqual } from 'synapse:test' 2 | 3 | const target = process.env.SYNAPSE_TARGET 4 | if (target === 'local') { 5 | test('local only', () => { 6 | expectEqual(target, 'local') 7 | }) 8 | } 9 | 10 | // !commands 11 | // synapse test 12 | -------------------------------------------------------------------------------- /test/fixtures/synth/default-provider/main.ts: -------------------------------------------------------------------------------- 1 | import * as aws from 'synapse-provider:aws' 2 | 3 | const bucket = new aws.S3Bucket() 4 | 5 | // !commands 6 | // synapse compile --target local 7 | -------------------------------------------------------------------------------- /test/fixtures/synth/default-provider/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "synapse": { 3 | "providers": { 4 | "aws": "*" 5 | } 6 | } 7 | } -------------------------------------------------------------------------------- /test/fixtures/synth/deployed-module/main.ts: -------------------------------------------------------------------------------- 1 | import * as pkg from 'pkg' 2 | import { Function } from 'synapse:srl/compute' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | const client = pkg.createClient() 6 | 7 | const fn = new Function(async (key: string) => { 8 | return client.get(key) 9 | }) 10 | 11 | test('client puts and gets', async () => { 12 | await client.put('foo', 'bar') 13 | expectEqual(await fn('foo'), 'bar') 14 | }) 15 | 16 | // !commands 17 | // (cd pkg && synapse deploy) 18 | // synapse test 19 | -------------------------------------------------------------------------------- /test/fixtures/synth/deployed-module/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "pkg": "file:pkg" 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/synth/deployed-module/pkg/main.ts: -------------------------------------------------------------------------------- 1 | import { Bucket } from 'synapse:srl/storage' 2 | 3 | const b = new Bucket() 4 | 5 | export function createClient() { 6 | function get(key: string) { 7 | return b.get(key, 'utf-8') 8 | } 9 | 10 | function put(key: string, data: string) { 11 | return b.put(key, data) 12 | } 13 | 14 | return { get, put } 15 | } 16 | -------------------------------------------------------------------------------- /test/fixtures/synth/deployed-module/pkg/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "exports": "./main.ts" 3 | } -------------------------------------------------------------------------------- /test/fixtures/synth/fn-bind/main.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | import { describe, it, test, expectEqual } from 'synapse:test' 3 | import './symEval' 4 | 5 | const add = (a: number, b: number) => a + b 6 | const plus1 = add.bind(undefined, 1) 7 | 8 | test('plus1', () => { 9 | expectEqual(plus1(1), 2) 10 | }) 11 | 12 | const plus1plus2 = plus1.bind(undefined, 2) 13 | 14 | test('plus1plus2', () => { 15 | expectEqual(plus1plus2(1), 3) 
16 | }) 17 | 18 | function getThis(this: any) { 19 | return this 20 | } 21 | 22 | const getThisNumber = getThis.bind(5) 23 | const getThisBindTwice = getThisNumber.bind(6) // should not change `this` binding 24 | 25 | test('getThis', () => { 26 | expectEqual(getThisNumber(), 5) 27 | expectEqual(getThisBindTwice(), 5) 28 | }) 29 | 30 | describe('Function', () => { 31 | const fn = new Function(plus1) 32 | 33 | it('works with bound functions', async () => { 34 | expectEqual(await fn(1), 2) 35 | }) 36 | }) 37 | 38 | 39 | // !commands 40 | // synapse deploy 41 | // synapse test 42 | -------------------------------------------------------------------------------- /test/fixtures/synth/fn-bind/symEval.ts: -------------------------------------------------------------------------------- 1 | import { test, expectEqual } from 'synapse:test' 2 | import { symEval, bindFunctionModel } from 'synapse:core' 3 | 4 | const add = (a: number, b: number) => a + b 5 | 6 | const addCalls: [number, number][] = [] 7 | 8 | bindFunctionModel(add, (a, b) => { 9 | addCalls.push([a, b]) 10 | 11 | return add(a, b) 12 | }) 13 | 14 | const boundAdd = add.bind(undefined, 1) 15 | symEval(() => boundAdd(2)) 16 | 17 | test('boundAdd (symbolic evaluation)', () => { 18 | expectEqual(addCalls, [[1, 2]]) 19 | }) 20 | 21 | 22 | // TODO: need to rework some of the code for this 23 | // Test bound `this` 24 | 25 | // const addThis = function (this: number, n: number) { return this + n } 26 | 27 | // const addThisCalls: [number, number][] = [] 28 | 29 | // bindFunctionModel(addThis, function (n) { 30 | // addThisCalls.push([this, n]) 31 | 32 | // return addThis.call(this, n) 33 | // }) 34 | 35 | // const boundAddThis = addThis.bind(2, 3) 36 | // symEval(() => boundAddThis()) 37 | 38 | // test('boundAddThis (symbolic evaluation)', () => { 39 | // expectEqual(addThisCalls, [[2, 3]]) 40 | // }) 41 | 42 | -------------------------------------------------------------------------------- /test/fixtures/synth/js-symbols/main.ts: -------------------------------------------------------------------------------- 1 | import { test, expectEqual } from 'synapse:test' 2 | 3 | { 4 | let y = 0 5 | const x = { [Symbol.dispose]: () => { y += 1 } } 6 | 7 | test('dipose', () => { 8 | { 9 | using _ = x 10 | } 11 | expectEqual(y, 1) 12 | }) 13 | } 14 | 15 | { 16 | let y = 0 17 | const x = { [Symbol.asyncDispose]: async () => { y += 1 } } 18 | 19 | test('asyncDispose', async () => { 20 | { 21 | await using _ = x 22 | } 23 | expectEqual(y, 1) 24 | }) 25 | } 26 | 27 | let c = 0 28 | const iterable: Iterable = { 29 | [Symbol.iterator]: () => { 30 | const next = () => ({ 31 | value: c++, 32 | done: c > 5, 33 | }) 34 | 35 | return { next } 36 | } 37 | } 38 | 39 | test('iterator', () => { 40 | expectEqual([...iterable], [0, 1, 2, 3, 4]) 41 | }) 42 | 43 | // !commands 44 | // synapse test 45 | -------------------------------------------------------------------------------- /test/fixtures/synth/let-bindings/main.ts: -------------------------------------------------------------------------------- 1 | import { it, expectEqual } from 'synapse:test' 2 | 3 | let c = 1 4 | function fooFactory() { 5 | let x: number 6 | const foo = () => x 7 | x = c++ 8 | 9 | return foo 10 | } 11 | 12 | const foo1 = fooFactory() 13 | const foo2 = fooFactory() 14 | expectEqual(foo1(), 1) 15 | expectEqual(foo2(), 2) 16 | 17 | it('uses a different symbol binding id for each function', () => { 18 | expectEqual(foo1(), 1) 19 | expectEqual(foo2(), 2) 20 | }) 21 | 22 | function sharedFactory() { 23 
| let x: number 24 | const get = () => x 25 | const inc = () => void x++ 26 | x = c++ 27 | 28 | // For good measure, we'll also create a nested function 29 | function createIncTwo() { 30 | const two = 2 31 | 32 | return function() { 33 | x += two 34 | } 35 | } 36 | 37 | return { get, inc, incTwo: createIncTwo() } 38 | } 39 | 40 | const shared = sharedFactory() 41 | expectEqual(shared.get(), 3) 42 | shared.inc() 43 | expectEqual(shared.get(), 4) 44 | 45 | it('shares scoped bindings across functions', () => { 46 | expectEqual(shared.get(), 4) 47 | shared.inc() 48 | expectEqual(shared.get(), 5) 49 | }) 50 | 51 | it('shares scoped bindings with nested functions', () => { 52 | expectEqual(shared.get(), 4) 53 | shared.incTwo() 54 | expectEqual(shared.get(), 6) 55 | }) 56 | 57 | // Circular references require late binding 58 | function fib(n: number): number { 59 | return n <= 1 ? n : doFib(n) 60 | } 61 | 62 | function doFib(n: number) { 63 | return fib(n - 2) + fib(n - 1) 64 | } 65 | 66 | expectEqual(fib(10), 55) 67 | 68 | it('handles late bindings', () => { 69 | expectEqual(fib(10), 55) 70 | }) 71 | 72 | const arr: number[] = [] 73 | function push(val: number) { 74 | arr.push(val) 75 | } 76 | 77 | function pop() { 78 | return arr.pop() 79 | } 80 | 81 | it('shares arrays across functions', () => { 82 | push(1) 83 | expectEqual(pop(), 1) 84 | }) 85 | 86 | const fns: (() => number)[] = [] 87 | for (let i = 0; i < 2; i++) { 88 | fns[i] = () => i 89 | } 90 | 91 | it('captures loop variable correctly in closures (constant)', () => { 92 | expectEqual(fns[0](), 0) 93 | expectEqual(fns[1](), 1) 94 | }) 95 | 96 | // !commands 97 | // synapse deploy 98 | // synapse test 99 | -------------------------------------------------------------------------------- /test/fixtures/synth/react-jsx/main.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { renderToString } from 'react-dom/server' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | function ChildComponent(props: { foo: string }) { 6 | return

<div>{props.foo}</div> 7 | } 8 | 9 | function MainComponent() { 10 | return <div> 11 | <ChildComponent foo="bar" /> 12 | </div> 13 | } 14 | 15 | test('render a component', () => { 16 | expectEqual(renderToString(MainComponent()), '<div><div>bar</div></div>
    ') 17 | }) -------------------------------------------------------------------------------- /test/fixtures/synth/react-jsx/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "jsx": "react-jsx", 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/synth/runtime-transforms/callables-exported.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | 3 | export const y = new Function(async () => ({ data: 'hello' })) 4 | -------------------------------------------------------------------------------- /test/fixtures/synth/runtime-transforms/callables.ts: -------------------------------------------------------------------------------- 1 | import * as exported from './callables-exported' 2 | import { Function } from 'synapse:srl/compute' 3 | import { describe, it, expectEqual } from 'synapse:test' 4 | 5 | const x = new Function(async () => ({ data: 'hello' })) 6 | 7 | export function foo() { 8 | return new Function(async () => ({ data: 'hello' })) 9 | } 10 | 11 | describe('callables', () => { 12 | it('transforms local function calls', async () => { 13 | const resp = await x() 14 | expectEqual(resp.data, 'hello') 15 | }) 16 | 17 | it('works for exported functions', async () => { 18 | const resp2 = await exported.y() 19 | expectEqual(resp2.data, 'hello') 20 | }) 21 | }) 22 | 23 | -------------------------------------------------------------------------------- /test/fixtures/synth/runtime-transforms/main.ts: -------------------------------------------------------------------------------- 1 | // !commands 2 | // synapse deploy 3 | // synapse test 4 | -------------------------------------------------------------------------------- /test/fixtures/synth/stub-when-bundled/main.ts: -------------------------------------------------------------------------------- 1 | import { HttpService } from 'synapse:srl/compute' 2 | import { stubWhenBundled } from 'synapse:core' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | function bar() { 6 | return 'bar' 7 | } 8 | 9 | const shouldStub = !!process.env.SHOULD_STUB 10 | if (shouldStub) { 11 | stubWhenBundled(bar) 12 | } 13 | 14 | const service = new HttpService() 15 | const route = service.route('GET', '/foo', bar) 16 | 17 | test(shouldStub ? 
'it can stub' : 'it does not stub', async () => { 18 | const actual = await service.callOperation(route) 19 | if (shouldStub) { 20 | expectEqual(actual, undefined) 21 | } else { 22 | expectEqual(actual, 'bar') 23 | } 24 | }) 25 | 26 | // !commands 27 | // synapse deploy 28 | // synapse test 29 | // SHOULD_STUB=1 synapse compile 30 | // synapse deploy 31 | // synapse test 32 | // synapse destroy 33 | -------------------------------------------------------------------------------- /test/fixtures/synth/test-hooks/main.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, before, after, expectEqual } from 'synapse:test' 2 | 3 | describe('before', () => { 4 | let x: number 5 | 6 | it('works when placed after the test declaration', () => { 7 | expectEqual(x, 1) 8 | }) 9 | 10 | before(() => { 11 | x = 1 12 | }) 13 | 14 | describe('nested', () => { 15 | it('works when nested', () => { 16 | expectEqual(x, 1) 17 | }) 18 | 19 | let y: number 20 | before(() => { 21 | y = 2 22 | }) 23 | 24 | it('supports multiple hooks', () => { 25 | expectEqual(x, 1) 26 | expectEqual(y, 2) 27 | }) 28 | }) 29 | 30 | describe('nested conflict', () => { 31 | before(() => { 32 | x = 2 33 | }) 34 | 35 | it('works when nested', () => { 36 | expectEqual(x, 2) 37 | }) 38 | }) 39 | }) 40 | 41 | // !commands 42 | // synapse test 43 | -------------------------------------------------------------------------------- /test/fixtures/test/cache-indirect/main.ts: -------------------------------------------------------------------------------- 1 | import { HttpService } from 'synapse:srl/compute' 2 | import { fetch } from 'synapse:http' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | const service = new HttpService() 6 | 7 | const goodData = 1 8 | const badData = 0 9 | const data = process.env.USE_GOOD_DATA !== '0' ? 
goodData : badData 10 | 11 | const getData = service.route('GET', '/data', () => data) 12 | 13 | test('returns good data', async () => { 14 | const resp = await fetch(getData) 15 | expectEqual(resp, goodData) 16 | }) 17 | 18 | // !commands 19 | // # First make sure the test passes normally 20 | // export USE_GOOD_DATA=1 21 | // synapse deploy 22 | // synapse test 23 | // 24 | // # Now we build with bad data 25 | // export USE_GOOD_DATA=0 26 | // synapse compile 27 | // synapse deploy 28 | // synapse test @expectFail 29 | // 30 | // # Check that tests pass again after rebuild 31 | // export USE_GOOD_DATA=1 32 | // synapse compile 33 | // synapse deploy 34 | // synapse test 35 | -------------------------------------------------------------------------------- /test/fixtures/test/caching/main.ts: -------------------------------------------------------------------------------- 1 | import { test, expectEqual } from 'synapse:test' 2 | 3 | const a = process.env.A || 'a' 4 | const b = process.env.B || 'b' 5 | 6 | test('a', () => expectEqual(a, a)) 7 | test('b', () => expectEqual(b, b)) 8 | 9 | export function main(output: string, expected: string) { 10 | const lastLine = output.split('\n').at(-1) 11 | expectEqual(lastLine, `Skipped ${expected} unchanged tests`) 12 | } 13 | 14 | // !commands 15 | // synapse deploy && synapse test 16 | // export OUTPUT=$(synapse test) 17 | // synapse run -- "$OUTPUT" 2 18 | // 19 | // export A=aa 20 | // synapse compile && synapse deploy 21 | // export OUTPUT=$(synapse test) 22 | // synapse run -- "$OUTPUT" 1 23 | -------------------------------------------------------------------------------- /test/fixtures/test/hooks/main.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, before, after, expectEqual } from 'synapse:test' 2 | 3 | describe('before', () => { 4 | let x: number 5 | 6 | it('works when placed after the test declaration', () => { 7 | expectEqual(x, 1) 8 | }) 9 | 10 | before(() => { 11 | x = 1 12 | }) 13 | 14 | describe('nested', () => { 15 | it('works when nested', () => { 16 | expectEqual(x, 1) 17 | }) 18 | 19 | let y: number 20 | before(() => { 21 | y = 2 22 | }) 23 | 24 | it('supports multiple hooks', () => { 25 | expectEqual(x, 1) 26 | expectEqual(y, 2) 27 | }) 28 | }) 29 | 30 | describe('nested conflict', () => { 31 | before(() => { 32 | x = 2 33 | }) 34 | 35 | it('works when nested (top down)', () => { 36 | expectEqual(x, 2) 37 | }) 38 | }) 39 | }) 40 | 41 | describe('after', () => { 42 | let x: number 43 | let afterCount = 0 44 | 45 | after(() => { 46 | expectEqual(x, 1) 47 | // Little bit of a hack because there are few ways to know for 48 | // sure `after` was called without another testing layer on top 49 | console.log('called after', afterCount++) 50 | }) 51 | 52 | it('works when placed before the test declaration', () => { 53 | x = 1 54 | }) 55 | 56 | describe('nested', () => { 57 | let y: number 58 | 59 | it('supports multiple hooks', () => { 60 | x = 1 61 | y = 2 62 | }) 63 | 64 | after(() => { 65 | expectEqual(y, 2) 66 | console.log('called after', afterCount++) 67 | }) 68 | }) 69 | 70 | describe('nested conflict', () => { 71 | after(() => { 72 | expectEqual(x, 2) 73 | x = 1 74 | console.log('called after', afterCount++) 75 | }) 76 | 77 | it('works when nested (bottoms up)', () => { 78 | x = 2 79 | }) 80 | }) 81 | }) 82 | 83 | // !commands 84 | // # We can verify the order of `after` calls by parsing the output 85 | // export OUTPUT=$(synapse test --show-logs | grep "called after" | 
cut -c 18 | tr -d "\n") 86 | // @expectEqual "$OUTPUT" 00101 87 | -------------------------------------------------------------------------------- /test/fixtures/test/removed-file/main.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'node:path' 2 | import { test, expectEqual } from 'synapse:test' 3 | 4 | const basename = path.basename(__filename) 5 | test(`check filename ${basename}`, () => { 6 | expectEqual(basename, 'main.ts') 7 | }) 8 | 9 | // !commands 10 | // synapse test 11 | // 12 | // cp main.ts main2.ts 13 | // (synapse test | grep "\[FAILED\] check filename main2.ts") || (echo "Expected failure message for 2nd test" && false) 14 | // 15 | // rm main2.ts 16 | // synapse test 17 | // synapse test main2.ts @expectFail 18 | // 19 | // 20 | // @finally rm -f main2.ts 21 | -------------------------------------------------------------------------------- /test/fixtures/test/rollback-if-failed/main.ts: -------------------------------------------------------------------------------- 1 | import { Function } from 'synapse:srl/compute' 2 | import { test, expectEqual } from 'synapse:test' 3 | 4 | const foo = process.env.FOO ?? 'foo' 5 | const fn = new Function(async () => { 6 | return foo 7 | }) 8 | 9 | test('a test', async () => { 10 | expectEqual(await fn(), 'foo') 11 | }) 12 | 13 | export async function main(expected: string) { 14 | expectEqual(await fn(), expected) 15 | } 16 | 17 | // !commands 18 | // # Base-case 19 | // synapse deploy 20 | // synapse test 21 | // synapse run -- foo 22 | // 23 | // export FOO=bar 24 | // synapse compile 25 | // synapse deploy 26 | // synapse run -- $FOO 27 | // synapse test --rollback-if-failed @expectFail "Expected test to fail" 28 | // synapse run -- foo 29 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/classes/adder.zig: -------------------------------------------------------------------------------- 1 | const js = @import("js"); 2 | 3 | pub const Adder = struct { 4 | a: u32, 5 | 6 | pub fn init(a: u32) @This() { 7 | return .{ .a = a }; 8 | } 9 | 10 | pub fn add(this: *@This(), b: u32) u32 { 11 | return this.a + b; 12 | } 13 | }; 14 | 15 | comptime { 16 | js.registerModule(@This()); 17 | } 18 | 19 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/classes/main.ts: -------------------------------------------------------------------------------- 1 | import { Adder } from './adder.zig' 2 | import { test, expectEqual } from 'synapse:test' 3 | 4 | test('Adder', () => { 5 | const adder = new Adder(1) 6 | expectEqual(adder.add(2), 3) 7 | }) 8 | 9 | 10 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/classes/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowArbitraryExtensions": true 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/host-arch/add.zig: -------------------------------------------------------------------------------- 1 | pub fn add(a: u32, b: u32) u32 { 2 | return a + b; 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/host-arch/main.ts: -------------------------------------------------------------------------------- 1 | import { add } from './add.zig' 2 | import { test, expectEqual 
} from 'synapse:test' 3 | import { LambdaFunction } from '@cohesible/synapse-aws/lambda' 4 | 5 | const archs = ['aarch64', 'x64'] as const 6 | 7 | for (const arch of archs) { 8 | const fn = new LambdaFunction(add, { arch }) 9 | test(arch, async () => { 10 | const actual = await fn.invoke(2, 2) // FIXME: `LambdaFunction` doesn't inherit types from `Function` 11 | expectEqual(actual, 4) 12 | }) 13 | } 14 | 15 | // !commands 16 | // # TODO: we can only run this test if we have AWS creds 17 | // echo "skipped"; exit 0 18 | // synapse test 19 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/host-arch/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "allowArbitraryExtensions": true 5 | } 6 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/multiple-files/main.ts: -------------------------------------------------------------------------------- 1 | import * as one from './one.zig' 2 | import * as two from './two.zig' 3 | import { test, expectEqual } from 'synapse:test' 4 | 5 | test('one', () => { 6 | expectEqual(one.addOne(1), 2) 7 | }) 8 | 9 | test('two', () => { 10 | expectEqual(two.addTwo(2), 4) 11 | }) 12 | 13 | // !commands 14 | // synapse test 15 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/multiple-files/one.zig: -------------------------------------------------------------------------------- 1 | const add = @import("./shared.zig").add; 2 | 3 | pub fn addOne(a: u32) u32 { 4 | return add(a, 1); 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/multiple-files/shared.zig: -------------------------------------------------------------------------------- 1 | pub fn add(a: u32, b: u32) u32 { 2 | return a + b; 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/multiple-files/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "allowArbitraryExtensions": true 5 | } 6 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/multiple-files/two.zig: -------------------------------------------------------------------------------- 1 | const add = @import("./shared.zig").add; 2 | 3 | pub fn addTwo(a: u32) u32 { 4 | return add(a, 2); 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/promises/main.ts: -------------------------------------------------------------------------------- 1 | import { expect, expectEqual } from 'synapse:test' 2 | import { add, throwMe, returnVoid } from './mod.zig' 3 | 4 | export async function main() { 5 | const res = await add(2, 2) 6 | console.log('2 + 2 =', res) 7 | expectEqual(res, 4) 8 | 9 | const p = throwMe() 10 | expect(p instanceof Promise) 11 | 12 | try { 13 | await p 14 | } catch (e) { 15 | expect(e instanceof Error) 16 | expectEqual((e as { code?: string }).code, 'Failed') 17 | console.log('Caught error') 18 | } 19 | 20 | expectEqual(await returnVoid(), undefined) 21 | console.log('Returned void') 22 | } 23 | 24 | // !commands 25 | // synapse run 26 | 27 | 
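// Note: `throwMe` in mod.zig (below) returns an error union (`error.Failed`), which
// surfaces on the JS side as a rejected promise whose `code` is the Zig error name,
// hence the 'Failed' check in `main` above.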
-------------------------------------------------------------------------------- /test/fixtures/zig-modules/promises/mod.zig: -------------------------------------------------------------------------------- 1 | const js = @import("js"); 2 | 3 | pub fn add(a: u32, b: u32) js.Promise(u32) { 4 | return .{ a + b }; 5 | } 6 | 7 | pub fn throwMe() !js.Promise(void) { 8 | return error.Failed; 9 | } 10 | 11 | pub fn returnVoid() js.Promise(void) { 12 | return .{}; 13 | } 14 | 15 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/promises/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowArbitraryExtensions": true 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple-outdir/add.zig: -------------------------------------------------------------------------------- 1 | pub fn add(a: u32, b: u32) u32 { 2 | return a + b; 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple-outdir/main.ts: -------------------------------------------------------------------------------- 1 | import { add } from './add.zig' 2 | import { expectEqual } from 'synapse:test' 3 | 4 | export function main() { 5 | expectEqual(add(1, 1), 2) 6 | } 7 | 8 | // !commands 9 | // synapse run 10 | // synapse build 11 | // ./dist/bin/main 12 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple-outdir/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowArbitraryExtensions": true, 4 | "outDir": "dist" 5 | } 6 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple/add.zig: -------------------------------------------------------------------------------- 1 | pub fn add(a: u32, b: u32) u32 { 2 | return a + b; 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple/main.ts: -------------------------------------------------------------------------------- 1 | import { test, expect, expectEqual } from 'synapse:test' 2 | import { add } from './add.zig' 3 | import { Function } from 'synapse:srl/compute' 4 | 5 | test('add 1+1', () => { 6 | expectEqual(add(1, 1), 2) 7 | }) 8 | 9 | const addFn = new Function(add) 10 | 11 | test('add 1+1 (Function)', async () => { 12 | expectEqual(await addFn(1, 1), 2) 13 | }) 14 | 15 | // test('add (optimized)', () => { 16 | // // Trigger lazy-load 17 | // add(1, 1) 18 | 19 | // function getNsPerCall(iters: number) { 20 | // const start = performance.now() 21 | // for (let i = 0; i < iters; i++) { 22 | // add(1, 1) 23 | // } 24 | 25 | // const dur = performance.now() - start 26 | 27 | // return (dur*1e6) / iters 28 | // } 29 | 30 | // const beforeOpt = getNsPerCall(100) 31 | // getNsPerCall(100000) 32 | // const afterOpt = getNsPerCall(100) 33 | 34 | // // There is another way to test this: add compile-time instrumentation 35 | // // to `js.zig` that counts whenever a slow call is made 36 | // const ratio = beforeOpt / afterOpt 37 | // expect(ratio >= 10) 38 | // }) 39 | 40 | export function main() { 41 | expectEqual(add(1, 1), 2) 42 | } 43 | 44 | // !commands 45 | // synapse deploy 46 | // synapse test 47 | // synapse run 48 | 
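// The same Zig `add` is exercised three ways: directly in a test, through the
// deployed `Function` resource, and from `main` via `synapse run`.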
-------------------------------------------------------------------------------- /test/fixtures/zig-modules/simple/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowArbitraryExtensions": true 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/strings/main.ts: -------------------------------------------------------------------------------- 1 | import { expect, expectEqual } from 'synapse:test' 2 | import { concat } from './mod.zig' 3 | 4 | export function main() { 5 | const res = concat('foo', 'bar') 6 | expectEqual(res, 'foobar') 7 | 8 | try { 9 | concat('a', 'b') 10 | expect(false) 11 | } catch (e) { 12 | expectEqual((e as any)?.code, 'StringTooSmall') 13 | } 14 | 15 | console.log('it works!') 16 | } 17 | 18 | // !commands 19 | // synapse run 20 | 21 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/strings/mod.zig: -------------------------------------------------------------------------------- 1 | const js = @import("js"); 2 | const std = @import("std"); 3 | 4 | pub fn concat(a: js.UTF8String, b: js.UTF8String) !js.UTF8String { 5 | if (a.data.len == 1) { 6 | return error.StringTooSmall; 7 | } 8 | 9 | var buf = try std.heap.c_allocator.allocSentinel(u8, a.data.len + b.data.len, 0); 10 | @memcpy(buf[0..a.data.len], a.data); 11 | @memcpy(buf[a.data.len..], b.data); 12 | 13 | return .{ .data = buf }; 14 | } 15 | 16 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/strings/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowArbitraryExtensions": true 4 | } 5 | } -------------------------------------------------------------------------------- /test/fixtures/zig-modules/types/expected.d.ts: -------------------------------------------------------------------------------- 1 | export interface MyStruct { 2 | foo: number; 3 | } 4 | export declare function getStruct(foo: number): MyStruct; 5 | export interface MyNestedStruct { 6 | foo: number; 7 | nested: MyStruct; 8 | } 9 | export declare function getNestedStruct(foo: number): MyNestedStruct; 10 | export declare function getFoo(s: MyStruct): number; 11 | export declare function addU64(a: number | bigint, b: number | bigint): number | bigint; 12 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/types/main.ts: -------------------------------------------------------------------------------- 1 | import * as mod from './mod.zig' 2 | import { test, expectEqual } from 'synapse:test' 3 | import * as path from 'node:path' 4 | import * as fs from 'node:fs/promises' 5 | 6 | test('return struct', () => { 7 | expectEqual(mod.getStruct(1), { foo: 1 }) 8 | }) 9 | 10 | test('return nested struct', () => { 11 | expectEqual(mod.getNestedStruct(1), { foo: 1, nested: { foo: 1 } }) 12 | }) 13 | 14 | test('struct parameter', () => { 15 | expectEqual(mod.getFoo({ foo: 2 }), 2) 16 | }) 17 | 18 | test('bigint params + return', () => { 19 | expectEqual(mod.addU64(9007199254740992n, 9007199254740992n), 18014398509481984n) 20 | }) 21 | 22 | const typeDefs = path.resolve(__dirname, 'mod.d.zig.ts') 23 | const expectedPath = path.resolve(__dirname, 'expected.d.ts') 24 | 25 | test('generated type definitions', async () => { 26 | const [actual, expected] = await 
Promise.all([ 27 | fs.readFile(typeDefs, 'utf-8'), 28 | fs.readFile(expectedPath, 'utf-8'), 29 | ]) 30 | expectEqual(actual, expected) 31 | }) 32 | 33 | // !commands 34 | // synapse test 35 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/types/mod.zig: -------------------------------------------------------------------------------- 1 | pub const MyStruct = struct { 2 | foo: u32, 3 | }; 4 | 5 | pub fn getStruct(foo: u32) MyStruct { 6 | return .{ .foo = foo }; 7 | } 8 | 9 | pub const MyNestedStruct = struct { 10 | foo: u32, 11 | nested: MyStruct, 12 | }; 13 | 14 | pub fn getNestedStruct(foo: u32) MyNestedStruct { 15 | return .{ 16 | .foo = foo, 17 | .nested = .{ .foo = foo }, 18 | }; 19 | } 20 | 21 | pub fn getFoo(s: MyStruct) u32 { 22 | return s.foo; 23 | } 24 | 25 | pub fn addU64(a: u64, b: u64) u64 { 26 | return a + b; 27 | } 28 | -------------------------------------------------------------------------------- /test/fixtures/zig-modules/types/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "allowArbitraryExtensions": true 5 | } 6 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "NodeNext", 5 | "moduleResolution": "NodeNext", 6 | "resolveJsonModule": true, 7 | "checkJs": false, 8 | "declaration": true, 9 | "sourceMap": true, 10 | "outDir": "./dist", 11 | "rootDir": ".", 12 | "esModuleInterop": true, 13 | "forceConsistentCasingInFileNames": true, 14 | "strict": true, 15 | "declarationMap": true, 16 | "skipLibCheck": true, 17 | "alwaysStrict": true, 18 | "allowArbitraryExtensions": true, 19 | }, 20 | "include": ["src"], 21 | "exclude": ["**/*.d.zig.ts"] 22 | } 23 | --------------------------------------------------------------------------------