├── .gitattributes
├── .github
│   └── workflows
│       └── ci.yml
├── .gitignore
├── .gitmodules
├── LICENSE
├── README.md
├── bench
│   ├── main.zig
│   └── samples
│       ├── add-one.zig
│       ├── fibonacci.zig
│       └── mandelbrot.zig
├── build.zig
├── build.zig.zon
├── optimize.md
├── run
│   └── main.zig
├── src
│   ├── .clang-format
│   ├── bytebox.h
│   ├── cffi.zig
│   ├── common.zig
│   ├── core.zig
│   ├── definition.zig
│   ├── instance.zig
│   ├── metering.zig
│   ├── opcode.zig
│   ├── stringpool.zig
│   ├── tests.zig
│   ├── vm_register.zig
│   ├── vm_stack.zig
│   └── wasi.zig
└── test
    ├── mem64
    │   ├── main.zig
    │   └── memtest.zig
    ├── wasi
    │   ├── bytebox_adapter.py
    │   ├── run.py
    │   └── runtests.sh
    └── wasm
        └── main.zig
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.zig text eol=lf
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 |
9 | jobs:
10 | tests:
11 | strategy:
12 | fail-fast: false
13 | matrix:
14 | os: [windows-latest, macos-latest, ubuntu-latest]
15 | runs-on: ${{matrix.os}}
16 | steps:
17 | - uses: actions/checkout@v2
18 | with:
19 | submodules: 'true'
20 |
21 | - name: Setup Zig
22 | uses: mlugg/setup-zig@v1
23 | with:
24 | version: 0.13.0
25 |
26 | - name: Setup Python
27 | uses: actions/setup-python@v4
28 | with:
29 | python-version: '3.11'
30 | cache: pip
31 |
32 | - name: Install python dependencies
33 | working-directory: test/wasi/wasi-testsuite/test-runner
34 | run: python3 -m pip install -r requirements.txt
35 |
36 | # Ideally we would use this but it seems to be broken
37 | # - name: Setup wasm-tools
38 | # uses: jcbhmr/setup-wasm-tools@v2
39 | # with:
40 | # wasm-tools-version: 1.207
41 |
42 | - name: Setup wasm-tools
43 | run: cargo install wasm-tools
44 |
45 | - name: Build
46 | run: |
47 | zig build
48 |
49 | - name: Run unit tests
50 | run: |
51 | zig build test-unit
52 | zig build -Dmeter=true test-unit
53 |
54 | - name: Run wasm testsuite
55 | run: |
56 | zig build test-wasm -- --log-suite
57 |
58 | - name: Run mem64 test
59 | run: |
60 | zig build test-mem64
61 |
62 | - name: Run wasi testsuite
63 | run: |
64 | zig build test-wasi
65 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .zig-cache
2 | zig-out
3 | test/wasm/wasm-generated
4 | *.wasm
5 | *.wasm.o
6 | .vs
7 | *.sublime-project
8 | *.sublime-workspace
9 | *.sln
10 | *.rdbg
11 | .DS_Store
12 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "zig-stable-array"]
2 | path = zig-stable-array
3 | url = https://github.com/rdunnington/zig-stable-array.git
4 | [submodule "test/testsuite"]
5 | path = test/wasm/wasm-testsuite
6 | url = https://github.com/WebAssembly/testsuite.git
7 | [submodule "test/wasi/wasi-testsuite"]
8 | path = test/wasi/wasi-testsuite
9 | url = https://github.com/WebAssembly/wasi-testsuite.git
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Reuben Dunnington
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Bytebox
2 |
3 |
4 |

5 |
6 | Bytebox is a WebAssembly VM.
7 |
8 |
9 | # Getting started
10 |
11 | ### Requirements
12 | Bytebox currently builds with [Zig 0.13.x](https://ziglang.org/download) to avoid churn on zig master.
13 |
14 | To run the tests:
15 | * `wasm-tools` is required to run the wasm testsuite. You can install it via the Rust toolchain (`cargo install wasm-tools`) or directly from the [release page](https://github.com/bytecodealliance/wasm-tools/releases).
16 | * `python3` is required to run the wasi testsuite. You may need to run `python3 -m pip install -r test/wasi/wasi-testsuite/test-runner/requirements.txt` to ensure the wasi test runner has all the necessary dependencies installed.
17 |
18 | ## Run Tests
19 |
20 | ```sh
21 | git clone --recurse-submodules https://github.com/rdunnington/bytebox.git
22 | cd bytebox
23 | zig build test-unit # run builtin zig unit tests
24 | zig build test-wasm # run official wasm spec testsuite
25 | zig build test-wasi # run official wasi spec testsuite
26 | zig build test-mem64 # run memory64 compat test
27 | zig build test # run all of the above in parallel (output will not be pretty!)
28 | ```
29 |
30 | ## Usage
31 |
32 | You can use the standalone runtime to load and execute WebAssembly programs:
33 | ```sh
34 | zig build run -- [function] [function args]...
35 | ```
36 |
37 | Or embed Bytebox in your own programs:
38 |
39 | ```zig
40 | // build.zig
41 | const std = @import("std");
42 |
43 | pub fn build(b: *std.build.Builder) void {
44 | const exe = b.addExecutable("my_program", "src/main.zig");
45 | exe.addPackage(std.build.Pkg{
46 | .name = "bytebox",
47 | .source = .{ .path = "bytebox/src/core.zig" }, // submodule in the root dir
48 | });
49 | exe.setTarget(b.standardTargetOptions(.{}));
50 | exe.setBuildMode(b.standardReleaseOptions());
51 | exe.install();
52 | const run = exe.run();
53 | const step = b.step("run", "runs my_program");
54 | step.dependOn(&run.step);
55 | }
56 |
57 | // main.zig
58 | const std = @import("std");
59 | const bytebox = @import("bytebox");
60 |
61 | pub fn main() !void {
62 | var gpa = std.heap.GeneralPurposeAllocator(.{}){};
63 | var allocator: std.mem.Allocator = gpa.allocator();
64 |
65 | const wasm_data: []u8 = try std.fs.cwd().readFileAlloc(allocator, "example.wasm", 1024 * 128);
66 | defer allocator.free(wasm_data);
67 |
68 | const module_def = try bytebox.createModuleDefinition(allocator, .{});
69 | defer module_def.destroy();
70 | try module_def.decode(wasm_data);
71 |
72 | const module_instance = try bytebox.createModuleInstance(.Stack, module_def, allocator);
73 | defer module_instance.destroy();
74 | try module_instance.instantiate(.{});
75 | }
76 | ```
77 |
78 | Inter-language FFI is also supported. See `src/bytebox.h` for an overview in C. To use bytebox as a static library, link with the built library in `zig-out/lib/`. Note that Zig assumes a default stack size of 8MB, so you'll need to ensure the same in your program.
79 |
80 | # Status
81 |
82 | This project is still in the alpha stage.
83 |
84 | | Legend | Meaning |
85 | | --- | --- |
86 | |✅|Implemented|
87 | |❌|TODO|
88 | |💀|Not planned/Removed from spec|
89 |
90 | ## [WebAssembly](https://webassembly.github.io/spec/core/index.html) support:
91 |
92 | | Status | Feature |
93 | | --- | --- |
94 | |✅|WebAssembly 1.0|
95 | |✅|Sign extension instructions|
96 | |✅|Non-trapping float-to-int conversion|
97 | |✅|Multiple values|
98 | |✅|Reference types|
99 | |✅|Table instructions|
100 | |✅|Multiple tables|
101 | |✅|Bulk memory and table instructions|
102 | |✅|Vector instructions|
103 |
104 | ## [WASI Preview 1](https://github.com/WebAssembly/WASI/tree/main) support:
105 |
106 | | Status | Feature |
107 | | --- | --- |
108 | |✅|args_get|
109 | |✅|args_sizes_get|
110 | |✅|environ_get|
111 | |✅|environ_sizes_get|
112 | |✅|clock_res_get|
113 | |✅|clock_time_get|
114 | |✅|fd_advise|
115 | |✅|fd_allocate|
116 | |✅|fd_close|
117 | |✅|fd_datasync|
118 | |✅|fd_fdstat_get|
119 | |✅|fd_fdstat_set_flags|
120 | |💀|fd_fdstat_set_rights|
121 | |✅|fd_filestat_get|
122 | |✅|fd_filestat_set_size|
123 | |✅|fd_filestat_set_times|
124 | |✅|fd_pread|
125 | |✅|fd_prestat_get|
126 | |✅|fd_prestat_dir_name|
127 | |✅|fd_pwrite|
128 | |✅|fd_read|
129 | |✅|fd_readdir|
130 | |✅|fd_renumber|
131 | |✅|fd_seek|
132 | |❌|fd_sync|
133 | |✅|fd_tell|
134 | |✅|fd_write|
135 | |✅|path_create_directory|
136 | |✅|path_filestat_get|
137 | |✅|path_filestat_set_times|
138 | |❌|path_link|
139 | |✅|path_open|
140 | |❌|path_readlink|
141 | |✅|path_remove_directory|
142 | |❌|path_rename|
143 | |✅|path_symlink|
144 | |✅|path_unlink_file|
145 | |❌|poll_oneoff|
146 | |✅|proc_exit|
147 | |💀|proc_raise|
148 | |❌|sched_yield|
149 | |✅|random_get|
150 | |❌|sock_accept|
151 | |❌|sock_recv|
152 | |❌|sock_send|
153 | |❌|sock_shutdown|
154 |
155 | ### Roadmap
156 | These tasks must be completed to enter alpha:
157 | * API ergonomics pass
158 | * Documentation
159 | * General TODO/code cleanup
160 | * Crash hardening/fuzzing
161 |
162 | To enter beta:
163 | * No breaking API changes after this point
164 | * Performance competitive with other well-known interpreters (e.g. [wasm-micro-runtime](https://github.com/bytecodealliance/wasm-micro-runtime), [wasm3](https://github.com/wasm3/wasm3))
165 |
166 | To have a 1.0 release:
167 | * Tested with a wide variety of wasm programs
168 | * Successfully used in other beta-quality projects
169 |
--------------------------------------------------------------------------------
/bench/main.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const bytebox = @import("bytebox");
3 | const Val = bytebox.Val;
4 | const Timer = std.time.Timer;
5 |
6 | pub const std_options: std.Options = .{
7 | .log_level = .info,
8 | };
9 |
10 | const Benchmark = struct {
11 | name: []const u8,
12 | filename: []const u8,
13 | param: i32,
14 | };
15 |
16 | fn elapsedMilliseconds(timer: *std.time.Timer) f64 {
17 | const ns_elapsed: f64 = @as(f64, @floatFromInt(timer.read()));
18 | const ms_elapsed = ns_elapsed / 1000000.0;
19 | return ms_elapsed;
20 | }
21 |
22 | fn run(allocator: std.mem.Allocator, benchmark: Benchmark) !void {
23 | var cwd = std.fs.cwd();
24 | const wasm_data: []u8 = try cwd.readFileAlloc(allocator, benchmark.filename, 1024 * 64); // Our wasm programs aren't very large
25 |
26 | var timer = try Timer.start();
27 |
28 | var module_def = try bytebox.createModuleDefinition(allocator, .{});
29 | defer module_def.destroy();
30 | try module_def.decode(wasm_data);
31 |
32 | var module_instance = try bytebox.createModuleInstance(.Stack, module_def, allocator);
33 | defer module_instance.destroy();
34 | try module_instance.instantiate(.{});
35 |
36 | const handle = try module_instance.getFunctionHandle("run");
37 | var input = [1]Val{.{ .I32 = benchmark.param }};
38 | var output = [1]Val{.{ .I32 = 0 }};
39 | try module_instance.invoke(handle, &input, &output, .{});
40 |
41 | const ms_elapsed: f64 = elapsedMilliseconds(&timer);
42 | std.log.info("{s} decode+instantiate+run took {d}ms\n", .{ benchmark.name, ms_elapsed });
43 | }
44 |
45 | pub fn main() !void {
46 | var gpa = std.heap.GeneralPurposeAllocator(.{}){};
47 | const allocator: std.mem.Allocator = gpa.allocator();
48 |
49 | const benchmarks = [_]Benchmark{ .{
50 | .name = "add-one",
51 | .filename = "zig-out/bin/add-one.wasm",
52 | .param = 123456789,
53 | }, .{
54 | .name = "fibonacci",
55 | .filename = "zig-out/bin/fibonacci.wasm",
56 | .param = 20,
57 | }, .{
58 | .name = "mandelbrot",
59 | .filename = "zig-out/bin/mandelbrot.wasm",
60 | .param = 20,
61 | } };
62 |
63 | for (benchmarks) |benchmark| {
64 | run(allocator, benchmark) catch |e| {
65 | std.log.err("{s} 'run' invocation failed with error: {}\n", .{ benchmark.name, e });
66 | return e;
67 | };
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/bench/samples/add-one.zig:
--------------------------------------------------------------------------------
1 | export fn run(n: i32) i32 {
2 | return n + 1;
3 | }
4 |
--------------------------------------------------------------------------------
/bench/samples/fibonacci.zig:
--------------------------------------------------------------------------------
1 | export fn run(n: i32) i32 {
2 | if (n < 2) {
3 | return 1;
4 | } else {
5 | const a = run(n - 1);
6 | const b = run(n - 2);
7 | return a + b;
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/bench/samples/mandelbrot.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const complex = std.math.complex;
3 | const Complex = complex.Complex(f32);
4 |
5 | const Color = struct {
6 | R: u8,
7 | G: u8,
8 | B: u8,
9 | };
10 |
11 | const COLOR_BLACK = Color{ .R = 0, .G = 0, .B = 0 };
12 | const COLOR_WHITE = Color{ .R = 255, .G = 255, .B = 255 };
13 |
14 | const WIDTH = 256;
15 | const HEIGHT = 256;
16 |
17 | var pixels = [_]Color{.{ .R = 0, .G = 0, .B = 0 }} ** (WIDTH * HEIGHT);
18 |
19 | fn mandelbrot(c: Complex, max_counter: i32) Color {
20 | var counter: u32 = 0;
21 | var z = Complex.init(0, 0);
22 | while (counter < max_counter) : (counter += 1) {
23 | z = z.mul(z).add(c);
24 | if (2.0 <= complex.abs(z)) {
25 | return COLOR_WHITE;
26 | }
27 | }
28 |
29 | return COLOR_BLACK;
30 | }
31 |
32 | export fn run(max_counter: i32) i32 {
33 | var y: u32 = 0;
34 | while (y < HEIGHT) : (y += 1) {
35 | var x: u32 = 0;
36 | while (x < WIDTH) : (x += 1) {
37 | const c = Complex.init(@as(f32, @floatFromInt(x)), @as(f32, @floatFromInt(y)));
38 | const color: Color = mandelbrot(c, max_counter);
39 | pixels[y * HEIGHT + x] = color;
40 | }
41 | }
42 |
43 | return 0;
44 | }
45 |
--------------------------------------------------------------------------------
/build.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 |
3 | const Build = std.Build;
4 | const Module = Build.Module;
5 | const ModuleImport = Module.Import;
6 | const CrossTarget = std.zig.CrossTarget;
7 | const CompileStep = std.Build.Step.Compile;
8 |
9 | const ExeOpts = struct {
10 | exe_name: []const u8,
11 | root_src: []const u8,
12 | step_name: []const u8,
13 | description: []const u8,
14 | step_dependencies: ?[]*Build.Step = null,
15 | emit_asm_step: ?*Build.Step = null,
16 | options: *Build.Step.Options,
17 | };
18 |
19 | pub fn build(b: *Build) void {
20 | const enable_metering = b.option(bool, "meter", "Enable metering") orelse false;
21 | const enable_debug_trace = b.option(bool, "debug_trace", "Enable debug tracing feature") orelse false;
22 | const enable_debug_trap = b.option(bool, "debug_trap", "Enable debug trap features") orelse false;
23 |
24 | const options = b.addOptions();
25 | options.addOption(bool, "enable_metering", enable_metering);
26 | options.addOption(bool, "enable_debug_trace", enable_debug_trace);
27 | options.addOption(bool, "enable_debug_trap", enable_debug_trap);
28 |
29 | const target = b.standardTargetOptions(.{});
30 | const optimize = b.standardOptimizeOption(.{});
31 |
32 | const stable_array = b.dependency("zig-stable-array", .{
33 | .target = target,
34 | .optimize = optimize,
35 | });
36 |
37 | var bench_add_one_step: *CompileStep = buildWasmExe(b, "bench/samples/add-one.zig", .Wasm32);
38 | var bench_fibonacci_step: *CompileStep = buildWasmExe(b, "bench/samples/fibonacci.zig", .Wasm32);
39 | var bench_mandelbrot_step: *CompileStep = buildWasmExe(b, "bench/samples/mandelbrot.zig", .Wasm32);
40 |
41 | const stable_array_import = ModuleImport{ .name = "stable-array", .module = stable_array.module("zig-stable-array") };
42 |
43 | const bytebox_module: *Build.Module = b.addModule("bytebox", .{
44 | .root_source_file = b.path("src/core.zig"),
45 | .imports = &[_]ModuleImport{stable_array_import},
46 | });
47 |
48 | bytebox_module.addOptions("config", options);
49 |
50 | const emit_asm_step: *Build.Step = b.step("asm", "Emit assembly");
51 |
52 | const imports = [_]ModuleImport{
53 | .{ .name = "bytebox", .module = bytebox_module },
54 | .{ .name = "stable-array", .module = stable_array.module("zig-stable-array") },
55 | };
56 |
57 | _ = buildExeWithRunStep(b, target, optimize, &imports, .{
58 | .exe_name = "bytebox",
59 | .root_src = "run/main.zig",
60 | .step_name = "run",
61 | .description = "Run a wasm program",
62 | .emit_asm_step = emit_asm_step,
63 | .options = options,
64 | });
65 |
66 | var bench_steps = [_]*Build.Step{
67 | &bench_add_one_step.step,
68 | &bench_fibonacci_step.step,
69 | &bench_mandelbrot_step.step,
70 | };
71 | _ = buildExeWithRunStep(b, target, optimize, &imports, .{
72 | .exe_name = "bench",
73 | .root_src = "bench/main.zig",
74 | .step_name = "bench",
75 | .description = "Run the benchmark suite",
76 | .step_dependencies = &bench_steps,
77 | .options = options,
78 | });
79 |
80 | const lib_bytebox: *Build.Step.Compile = b.addStaticLibrary(.{
81 | .name = "bytebox",
82 | .root_source_file = b.path("src/cffi.zig"),
83 | .target = target,
84 | .optimize = optimize,
85 | });
86 | lib_bytebox.root_module.addImport(stable_array_import.name, stable_array_import.module);
87 | lib_bytebox.root_module.addOptions("config", options);
88 | lib_bytebox.installHeader(b.path("src/bytebox.h"), "bytebox.h");
89 | b.installArtifact(lib_bytebox);
90 |
91 | // Unit tests
92 | const unit_tests: *Build.Step.Compile = b.addTest(.{
93 | .root_source_file = b.path("src/tests.zig"),
94 | .target = target,
95 | .optimize = optimize,
96 | });
97 | unit_tests.root_module.addImport(stable_array_import.name, stable_array_import.module);
98 | unit_tests.root_module.addOptions("config", options);
99 | const run_unit_tests = b.addRunArtifact(unit_tests);
100 | const unit_test_step = b.step("test-unit", "Run unit tests");
101 | unit_test_step.dependOn(&run_unit_tests.step);
102 |
103 | // wasm tests
104 | const wasm_testsuite_step = buildExeWithRunStep(b, target, optimize, &imports, .{
105 | .exe_name = "test-wasm",
106 | .root_src = "test/wasm/main.zig",
107 | .step_name = "test-wasm",
108 | .description = "Run the wasm testsuite",
109 | .options = options,
110 | });
111 |
112 | // wasi tests
113 | const wasi_testsuite = b.addSystemCommand(&.{"python3"});
114 | wasi_testsuite.addArg("test/wasi/run.py");
115 | const wasi_testsuite_step = b.step("test-wasi", "Run wasi testsuite");
116 | wasi_testsuite_step.dependOn(&wasi_testsuite.step);
117 |
118 | // mem64 test
119 | const compile_mem64_test = buildWasmExe(b, "test/mem64/memtest.zig", .Wasm64);
120 | b.getInstallStep().dependOn(&compile_mem64_test.step);
121 |
122 | const mem64_test_step: *Build.Step = buildExeWithRunStep(b, target, optimize, &imports, .{
123 | .exe_name = "test-mem64",
124 | .root_src = "test/mem64/main.zig",
125 | .step_name = "test-mem64",
126 | .description = "Run the mem64 test",
127 | .options = options,
128 | });
129 |
130 | // All tests
131 | const all_tests_step = b.step("test", "Run unit, wasm, and wasi tests");
132 | all_tests_step.dependOn(unit_test_step);
133 | all_tests_step.dependOn(wasm_testsuite_step);
134 | all_tests_step.dependOn(wasi_testsuite_step);
135 | all_tests_step.dependOn(mem64_test_step);
136 | }
137 |
138 | fn buildExeWithRunStep(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.Mode, imports: []const ModuleImport, opts: ExeOpts) *Build.Step {
139 | const exe: *Build.Step.Compile = b.addExecutable(.{
140 | .name = opts.exe_name,
141 | .root_source_file = b.path(opts.root_src),
142 | .target = target,
143 | .optimize = optimize,
144 | });
145 |
146 | for (imports) |import| {
147 | exe.root_module.addImport(import.name, import.module);
148 | }
149 | exe.root_module.addOptions("config", opts.options);
150 |
151 | if (opts.emit_asm_step) |asm_step| {
152 | const asm_filename = std.fmt.allocPrint(b.allocator, "{s}.asm", .{opts.exe_name}) catch unreachable;
153 | asm_step.dependOn(&b.addInstallFile(exe.getEmittedAsm(), asm_filename).step);
154 | }
155 |
156 | b.installArtifact(exe);
157 |
158 | if (opts.step_dependencies) |steps| {
159 | for (steps) |step| {
160 | exe.step.dependOn(step);
161 | }
162 | }
163 |
164 | const run = b.addRunArtifact(exe);
165 | run.step.dependOn(b.getInstallStep());
166 | if (b.args) |args| {
167 | run.addArgs(args);
168 | }
169 |
170 | const step: *Build.Step = b.step(opts.step_name, opts.description);
171 | step.dependOn(&run.step);
172 |
173 | return step;
174 | }
175 |
176 | const WasmArch = enum {
177 | Wasm32,
178 | Wasm64,
179 | };
180 |
181 | fn buildWasmExe(b: *Build, filepath: []const u8, arch: WasmArch) *CompileStep {
182 | var filename: []const u8 = std.fs.path.basename(filepath);
183 | const filename_no_extension: []const u8 = filename[0 .. filename.len - 4];
184 |
185 | const cpu_arch: std.Target.Cpu.Arch = if (arch == .Wasm32) .wasm32 else .wasm64;
186 |
187 | var target_query: std.Target.Query = .{
188 | .cpu_arch = cpu_arch,
189 | .os_tag = .freestanding,
190 | };
191 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.bulk_memory));
192 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.nontrapping_fptoint));
193 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.multivalue));
194 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.mutable_globals));
195 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.reference_types));
196 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.sign_ext));
197 | target_query.cpu_features_add.addFeature(@intFromEnum(std.Target.wasm.Feature.simd128));
198 |
199 | var exe = b.addExecutable(.{
200 | .name = filename_no_extension,
201 | .root_source_file = b.path(filepath),
202 | .target = b.resolveTargetQuery(target_query),
203 | .optimize = .ReleaseSmall,
204 | });
205 | exe.rdynamic = true;
206 | exe.entry = .disabled;
207 |
208 | b.installArtifact(exe);
209 |
210 | return exe;
211 | }
212 |
--------------------------------------------------------------------------------
/build.zig.zon:
--------------------------------------------------------------------------------
1 | .{
2 | .name = "bytebox",
3 | .version = "0.0.1",
4 | .minimum_zig_version = "0.13.0",
5 | .paths = .{
6 | "src",
7 | "test/mem64",
8 | "test/wasi/run.py",
9 | "test/wasi/bytebox_adapter.py",
10 | "test/wasm/main.zig",
11 | "bench",
12 | "run",
13 | "build.zig",
14 | "build.zig.zon",
15 | "LICENSE",
16 | "README.md",
17 | },
18 | .dependencies = .{
19 | .@"zig-stable-array" = .{
20 | .url = "https://github.com/rdunnington/zig-stable-array/archive/bbb120cd0e4a8a83a21217e67ab6d7697a809756.tar.gz",
21 | .hash = "12202fe89e68d38484a313816e571eca786b36775e3aa2831def7b20815fa81831f1",
22 | },
23 | },
24 | }
25 |
--------------------------------------------------------------------------------
/optimize.md:
--------------------------------------------------------------------------------
1 | == Failed Optimizations ==
2 |
3 | * Giving locals their own stack space separate from values. The idea here was to save
4 | some perf on push/pop of call frames so that we wouldn't have to copy the return values
5 | back to the appropriate place. But since the wasm calling convention is to pass params
6 | via the stack, you'd have to copy them elsewhere anyway, defeating the point of
7 | the optimization anyway, which is to avoid copying values around.
8 |
9 | * Instruction stream. Instead of having an array of structs that contain opcode + immediates,
10 | have a byte stream of opcodes and immediates where you don't have to pay for the extra memory
11 | of the immediates if you don't need them. But it turns out that a lot of instructions
12 | use immediates anyway and the overhead of fetching them out of the stream is more
13 | expensive than just paying for the cache hits. Overall memory is <!-- TODO: sentence truncated in source — complete this thought -->
14 |
--------------------------------------------------------------------------------
/run/main.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const bytebox = @import("bytebox");
3 | const config = bytebox.config;
4 | const wasi = bytebox.wasi;
5 |
6 | const Val = bytebox.Val;
7 | const ValType = bytebox.ValType;
8 | const TraceMode = bytebox.DebugTrace.Mode;
9 |
10 | const log = bytebox.Logger.default();
11 |
12 | const RunErrors = error{
13 | IoError,
14 | MissingFunction,
15 | FunctionParamMismatch,
16 | BadFunctionParam,
17 | };
18 |
19 | const CmdOpts = struct {
20 | print_help: bool = false,
21 | print_version: bool = false,
22 | print_dump: bool = false,
23 | trace: TraceMode = .None,
24 |
25 | filename: ?[]const u8 = null,
26 | invoke: ?InvokeArgs = null,
27 | invalid_arg: ?[]const u8 = null,
28 | missing_options: ?[]const u8 = null,
29 |
30 | wasm_argv: ?[][]const u8 = null,
31 | wasm_env: ?[][]const u8 = null,
32 | wasm_dirs: ?[][]const u8 = null,
33 | };
34 |
35 | const InvokeArgs = struct {
36 | funcname: []const u8,
37 | args: [][]const u8,
38 | };
39 |
40 | fn isArgvOption(arg: []const u8) bool {
41 | return arg.len > 0 and arg[0] == '-';
42 | }
43 |
44 | fn getArgSafe(index: usize, args: [][]const u8) ?[]const u8 {
45 | return if (index < args.len) args[index] else null;
46 | }
47 |
48 | fn parseCmdOpts(args: [][]const u8, env_buffer: *std.ArrayList([]const u8), dir_buffer: *std.ArrayList([]const u8)) CmdOpts {
49 | var opts = CmdOpts{};
50 |
51 | if (args.len < 2) {
52 | opts.print_help = true;
53 | }
54 |
55 | var arg_index: usize = 1;
56 | while (arg_index < args.len) {
57 | const arg = args[arg_index];
58 |
59 | if (arg_index == 1 and !isArgvOption(arg)) {
60 | opts.filename = arg;
61 | opts.wasm_argv = args[1..2];
62 | } else if (arg_index == 2 and !isArgvOption(arg)) {
63 | const wasm_argv_begin: usize = arg_index - 1; // include wasm filename
64 | var wasm_argv_end: usize = arg_index;
65 | while (wasm_argv_end + 1 < args.len and !isArgvOption(args[wasm_argv_end + 1])) {
66 | wasm_argv_end += 1;
67 | }
68 | opts.wasm_argv = args[wasm_argv_begin .. wasm_argv_end + 1];
69 | arg_index = wasm_argv_end;
70 | } else if (std.mem.eql(u8, arg, "-h") or std.mem.eql(u8, arg, "--help")) {
71 | opts.print_help = true;
72 | } else if (std.mem.eql(u8, arg, "-v") or std.mem.eql(u8, arg, "--version")) {
73 | opts.print_version = true;
74 | } else if (std.mem.eql(u8, arg, "--dump")) {
75 | if (opts.filename != null) {
76 | opts.print_dump = true;
77 | } else {
78 | opts.missing_options = arg;
79 | }
80 | } else if (std.mem.eql(u8, arg, "-i") or std.mem.eql(u8, arg, "--invoke")) {
81 | arg_index += 1;
82 | if (arg_index < args.len) {
83 | opts.invoke = InvokeArgs{
84 | .funcname = args[arg_index],
85 | .args = args[arg_index + 1 ..],
86 | };
87 | } else {
88 | opts.missing_options = arg;
89 | }
90 | arg_index = args.len;
91 | } else if (std.mem.eql(u8, arg, "-e") or std.mem.eql(u8, arg, "--env")) {
92 | arg_index += 1;
93 | if (getArgSafe(arg_index, args)) |env| {
94 | env_buffer.appendAssumeCapacity(env);
95 | } else {
96 | opts.missing_options = arg;
97 | }
98 | } else if (std.mem.eql(u8, arg, "-d") or std.mem.eql(u8, arg, "--dir")) {
99 | arg_index += 1;
100 | if (getArgSafe(arg_index, args)) |dir| {
101 | dir_buffer.appendAssumeCapacity(dir);
102 | } else {
103 | opts.missing_options = arg;
104 | }
105 | } else if (std.mem.eql(u8, arg, "-t") or std.mem.eql(u8, arg, "--trace")) {
106 | arg_index += 1;
107 | if (getArgSafe(arg_index, args)) |mode_str| {
108 | if (bytebox.DebugTrace.parseMode(mode_str)) |mode| {
109 | if (config.enable_debug_trace == false) {
110 | log.err("Bytebox was not compiled with -Ddebug_trace=true. Enable this compile time flag if you want to enable tracing at runtime.", .{});
111 | opts.invalid_arg = mode_str;
112 | } else {
113 | opts.trace = mode;
114 | }
115 | } else {
116 | opts.invalid_arg = mode_str;
117 | }
118 | } else {
119 | opts.missing_options = arg;
120 | }
121 | } else {
122 | opts.invalid_arg = arg;
123 | break;
124 | }
125 |
126 | arg_index += 1;
127 | }
128 |
129 | if (env_buffer.items.len > 0) {
130 | opts.wasm_env = env_buffer.items;
131 | }
132 |
133 | if (dir_buffer.items.len > 0) {
134 | opts.wasm_dirs = dir_buffer.items;
135 | }
136 |
137 | return opts;
138 | }
139 |
140 | const version_string = "bytebox v0.0.1";
141 |
142 | fn printHelp(args: [][]const u8) void {
143 | const usage_string: []const u8 =
144 | \\Usage: {s} [WASM_ARGS]... [OPTION]...
145 | \\
146 | \\ Options:
147 | \\
148 | \\ -h, --help
149 | \\ Print help information.
150 | \\
151 | \\ -v, --version
152 | \\ Print version information.
153 | \\
154 | \\ --dump
155 | \\ Prints the given module definition's imports and exports. Imports are qualified
156 | \\ with the import module name.
157 | \\
158 | \\ -i, --invoke [ARGS]...
159 | \\ Call an exported, named function with arguments. The arguments are automatically
160 | \\ translated from string inputs to the function's native types. If the conversion
161 | \\ is not possible, an error is printed and execution aborts.
162 | \\
163 | \\ -e, --env
164 | \\ Set an environment variable for the execution environment. Typically retrieved
165 | \\ via the WASI API environ_sizes_get() and environ_get(). Multiple instances of
166 | \\ this flag is needed to pass multiple variables.
167 | \\
168 | \\ -d, --dir
169 | \\ Allow WASI programs to access this directory and paths within it. Can be relative
170 | \\ to the current working directory or absolute. Multiple instances of this flag can
171 | \\ be used to pass multiple dirs.
172 | \\
173 | \\ -t, --trace
174 | \\ Print a trace of the wasm program as it executes. MODE can be:
175 | \\ * none (default)
176 | \\ * function
177 | \\ * instruction
178 | \\ Note that this requires bytebox to be compiled with the flag -Ddebug_trace=true,
179 | \\ which is off by default for performance reasons.
180 | \\
181 | \\
182 | ;
183 |
184 | log.info(usage_string, .{args[0]});
185 | }
186 |
/// CLI entry point: parses command-line options, loads and decodes the wasm
/// module, wires up WASI imports, then invokes either the function named by
/// --invoke or the default "_start" export and prints any return values.
///
/// Fixes vs. previous revision:
///   * The FuncRef parameter error message incorrectly said "is a v128".
///   * `returns` and the print_dump `strbuf` ArrayLists were never deinit'd.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    var allocator: std.mem.Allocator = gpa.allocator();

    const args = try std.process.argsAlloc(allocator);
    defer std.process.argsFree(allocator, args);

    var env_buffer = std.ArrayList([]const u8).init(allocator);
    defer env_buffer.deinit();
    try env_buffer.ensureTotalCapacity(4096); // 4096 vars should be enough for most insane script file scenarios.

    var dir_buffer = std.ArrayList([]const u8).init(allocator);
    defer dir_buffer.deinit();
    try dir_buffer.ensureTotalCapacity(4096);

    const opts: CmdOpts = parseCmdOpts(args, &env_buffer, &dir_buffer);

    // Informational / argument-error modes exit before any real work happens.
    if (opts.print_help) {
        printHelp(args);
        return;
    } else if (opts.print_version) {
        log.info("{s}", .{version_string});
        return;
    } else if (opts.invalid_arg) |invalid_arg| {
        log.err("Invalid argument '{s}'.\n", .{invalid_arg});
        printHelp(args);
        return;
    } else if (opts.missing_options) |missing_options| {
        log.err("Argument {s} is missing required options.\n", .{missing_options});
        printHelp(args);
        return;
    } else if (opts.invoke != null and opts.filename == null) {
        log.err("Cannot invoke {s} without a file to load.", .{opts.invoke.?.funcname});
        printHelp(args);
        return;
    }

    if (opts.trace != .None) {
        bytebox.DebugTrace.setMode(opts.trace);
    }

    // parseCmdOpts guarantees a filename when none of the early-out modes hit.
    std.debug.assert(opts.filename != null);

    var cwd = std.fs.cwd();
    // 128MB cap on the loaded wasm binary.
    const wasm_data: []u8 = cwd.readFileAlloc(allocator, opts.filename.?, 1024 * 1024 * 128) catch |e| {
        std.log.err("Failed to read file '{s}' into memory: {}", .{ opts.filename.?, e });
        return RunErrors.IoError;
    };
    defer allocator.free(wasm_data);

    const module_def_opts = bytebox.ModuleDefinitionOpts{
        .debug_name = std.fs.path.basename(opts.filename.?),
        .log = log,
    };
    var module_def = try bytebox.createModuleDefinition(allocator, module_def_opts);
    defer module_def.destroy();

    module_def.decode(wasm_data) catch |e| {
        std.log.err("Caught error decoding module: {}", .{e});
        return e;
    };

    if (opts.print_dump) {
        var strbuf = std.ArrayList(u8).init(allocator);
        defer strbuf.deinit(); // was leaked before exiting on this path
        try strbuf.ensureTotalCapacity(1024 * 16);
        try module_def.dump(strbuf.writer());
        log.info("{s}", .{strbuf.items});
        return;
    }

    var module_instance = try bytebox.createModuleInstance(.Stack, module_def, allocator);
    defer module_instance.destroy();

    var imports_wasi: bytebox.ModuleImportPackage = try wasi.initImports(.{
        .argv = opts.wasm_argv,
        .env = opts.wasm_env,
        .dirs = opts.wasm_dirs,
    }, allocator);
    defer wasi.deinitImports(&imports_wasi);

    const instantiate_opts = bytebox.ModuleInstantiateOpts{
        .imports = &[_]bytebox.ModuleImportPackage{imports_wasi},
        .log = log,
    };

    module_instance.instantiate(instantiate_opts) catch |e| {
        std.log.err("Caught error instantiating module {}.", .{e});
        return e;
    };

    // Without an explicit --invoke, run the conventional WASI entry point.
    const invoke_funcname: []const u8 = if (opts.invoke) |invoke| invoke.funcname else "_start";
    const invoke_args: [][]const u8 = if (opts.invoke) |invoke| invoke.args else &[_][]u8{};

    const func_handle: bytebox.FunctionHandle = module_instance.getFunctionHandle(invoke_funcname) catch {
        // don't log an error if the user didn't explicitly try to invoke a function
        if (opts.invoke != null) {
            std.log.err("Failed to find function '{s}' - either it doesn't exist or is not a public export.", .{invoke_funcname});
        }
        return RunErrors.MissingFunction;
    };

    const func_export: bytebox.FunctionExport = module_def.getFunctionExport(func_handle);

    const num_params: usize = invoke_args.len;
    if (func_export.params.len != num_params) {
        var strbuf = std.ArrayList(u8).init(allocator);
        defer strbuf.deinit();
        try writeSignature(&strbuf, &func_export);
        std.log.err("Specified {} params but expected {}. The signature of '{s}' is:\n{s}", .{
            num_params,
            func_export.params.len,
            invoke_funcname,
            strbuf.items,
        });
        return RunErrors.FunctionParamMismatch;
    }

    std.debug.assert(invoke_args.len == num_params);

    // Convert each CLI argument string into a typed wasm value according to
    // the function's declared parameter types.
    var params = std.ArrayList(bytebox.Val).init(allocator);
    defer params.deinit();
    try params.resize(invoke_args.len);
    for (func_export.params, 0..) |valtype, i| {
        const arg: []const u8 = invoke_args[i];
        switch (valtype) {
            .I32 => {
                const parsed: i32 = std.fmt.parseInt(i32, arg, 0) catch |e| {
                    std.log.err("Failed to parse arg at index {} ('{s}') as an i32: {}", .{ i, arg, e });
                    return RunErrors.BadFunctionParam;
                };
                params.items[i] = Val{ .I32 = parsed };
            },
            .I64 => {
                const parsed: i64 = std.fmt.parseInt(i64, arg, 0) catch |e| {
                    std.log.err("Failed to parse arg at index {} ('{s}') as an i64: {}", .{ i, arg, e });
                    return RunErrors.BadFunctionParam;
                };
                params.items[i] = Val{ .I64 = parsed };
            },
            .F32 => {
                const parsed: f32 = std.fmt.parseFloat(f32, arg) catch |e| {
                    std.log.err("Failed to parse arg at index {} ('{s}') as a f32: {}", .{ i, arg, e });
                    return RunErrors.BadFunctionParam;
                };
                params.items[i] = Val{ .F32 = parsed };
            },
            .F64 => {
                const parsed: f64 = std.fmt.parseFloat(f64, arg) catch |e| {
                    std.log.err("Failed to parse arg at index {} ('{s}') as a f64: {}", .{ i, arg, e });
                    return RunErrors.BadFunctionParam;
                };
                params.items[i] = Val{ .F64 = parsed };
            },
            .V128 => {
                std.log.err("Param at index {} is a v128, which is currently only invokeable from code.", .{i});
                return RunErrors.BadFunctionParam;
            },
            .FuncRef => {
                // Bug fix: this message previously said "is a v128".
                std.log.err("Param at index {} is a funcref, making this function only invokeable from code.", .{i});
                return RunErrors.BadFunctionParam;
            },
            .ExternRef => {
                std.log.err("Param at index {} is an externref, making this function only invokeable from code.", .{i});
                return RunErrors.BadFunctionParam;
            },
        }
    }

    var returns = std.ArrayList(bytebox.Val).init(allocator);
    defer returns.deinit(); // was leaked before
    try returns.resize(func_export.returns.len);

    module_instance.invoke(func_handle, params.items.ptr, returns.items.ptr, .{}) catch |e| {
        var backtrace = module_instance.formatBacktrace(1, allocator) catch unreachable;
        std.log.err("Caught {} during function invoke. Backtrace:\n{s}\n", .{ e, backtrace.items });
        backtrace.deinit();
        return e;
    };

    // Pretty-print any return values, one per line with its type.
    {
        var strbuf = std.ArrayList(u8).init(allocator);
        defer strbuf.deinit();
        const writer = strbuf.writer();

        if (returns.items.len > 0) {
            const return_types = func_export.returns;
            try std.fmt.format(writer, "return:\n", .{});
            for (returns.items, 0..) |_, i| {
                switch (return_types[i]) {
                    .I32 => try std.fmt.format(writer, "  {} (i32)\n", .{returns.items[i].I32}),
                    .I64 => try std.fmt.format(writer, "  {} (i64)\n", .{returns.items[i].I64}),
                    .F32 => try std.fmt.format(writer, "  {} (f32)\n", .{returns.items[i].F32}),
                    .F64 => try std.fmt.format(writer, "  {} (f64)\n", .{returns.items[i].F64}),
                    .V128 => unreachable, // TODO support
                    .FuncRef => try std.fmt.format(writer, "  (funcref)\n", .{}),
                    .ExternRef => try std.fmt.format(writer, "  (externref)\n", .{}),
                }
            }
            try std.fmt.format(writer, "\n", .{});
        }
        if (strbuf.items.len > 0) {
            log.info("{s}\n", .{strbuf.items});
        }
    }
}
391 |
/// Appends a human-readable description of a function export's parameter and
/// return types to `strbuf`, e.g. "  params:\n    I32\n  returns: none\n".
fn writeSignature(strbuf: *std.ArrayList(u8), info: *const bytebox.FunctionExport) !void {
    const writer = strbuf.writer();

    if (info.params.len > 0) {
        try writer.print("  params:\n", .{});
        for (info.params) |param_type| {
            try writer.print("    {s}\n", .{valtypeToString(param_type)});
        }
    } else {
        try writer.print("  params: none\n", .{});
    }

    if (info.returns.len > 0) {
        try writer.print("  returns:\n", .{});
        for (info.returns) |return_type| {
            try writer.print("    {s}\n", .{valtypeToString(return_type)});
        }
    } else {
        try writer.print("  returns: none\n", .{});
    }
}
414 |
/// Returns the declared name of a ValType enum tag (e.g. "I32").
fn valtypeToString(valtype: ValType) []const u8 {
    // @tagName produces the enum field's declared name, which is exactly what
    // the previous inline-else over @typeInfo(ValType).Enum.fields computed.
    return @tagName(valtype);
}
420 |
--------------------------------------------------------------------------------
/src/.clang-format:
--------------------------------------------------------------------------------
1 | ---
2 | Language: Cpp
3 | AlignAfterOpenBracket: DontAlign
4 | AlignConsecutiveAssignments: false
5 | AlignConsecutiveDeclarations: false
6 | AlignEscapedNewlines: Left
7 | AlignOperands: true
8 | AlignTrailingComments: true
9 | AllowAllParametersOfDeclarationOnNextLine: false
10 | AllowShortBlocksOnASingleLine: false
11 | AllowShortCaseLabelsOnASingleLine: false
12 | AllowShortFunctionsOnASingleLine: None
13 | AllowShortIfStatementsOnASingleLine: false
14 | AllowShortLambdasOnASingleLine: Empty
15 | AllowShortLoopsOnASingleLine: false
16 | AlwaysBreakAfterReturnType: None
17 | AlwaysBreakBeforeMultilineStrings: true
18 | AlwaysBreakTemplateDeclarations: Yes
19 | BinPackArguments: true
20 | BinPackParameters: true
21 | BreakBeforeBinaryOperators: NonAssignment
22 | BreakBeforeBraces: Allman
23 | BreakBeforeInheritanceComma: true
24 | BreakBeforeTernaryOperators: true
25 | BreakConstructorInitializers: BeforeComma
26 | BreakStringLiterals: true
27 | ColumnLimit: 0
28 | CommentPragmas: '^ IWYU pragma:'
29 | CompactNamespaces: true
30 | ConstructorInitializerIndentWidth: 4
31 | ContinuationIndentWidth: 4
32 | DerivePointerAlignment: false
33 | DisableFormat: false
34 | FixNamespaceComments: true
35 | IndentCaseLabels: true
36 | IndentPPDirectives: None
37 | IndentWidth: 4
38 | IndentWrappedFunctionNames: false
39 | JavaScriptQuotes: Leave
40 | JavaScriptWrapImports: true
41 | KeepEmptyLinesAtTheStartOfBlocks: false
42 | MacroBlockBegin: ''
43 | MacroBlockEnd: ''
44 | MaxEmptyLinesToKeep: 1
45 | PackConstructorInitializers: Never
46 | PenaltyBreakAssignment: 2
47 | PenaltyBreakBeforeFirstCallParameter: 1
48 | PenaltyBreakComment: 300
49 | PenaltyBreakFirstLessLess: 120
50 | PenaltyBreakString: 1000
51 | PenaltyBreakTemplateDeclaration: 10
52 | PenaltyExcessCharacter: 1000000
53 | PenaltyReturnTypeOnItsOwnLine: 200
54 | PointerAlignment: Left
55 | ReflowComments: false
56 | SortIncludes: true
57 | SpaceAfterCStyleCast: false
58 | SpaceBeforeAssignmentOperators: true
59 | SpaceBeforeParens: ControlStatements
60 | SpaceBeforeRangeBasedForLoopColon: true
61 | SpaceInEmptyParentheses: false
62 | SpacesBeforeTrailingComments: 4
63 | SpacesInAngles: false
64 | SpacesInContainerLiterals: true
65 | SpacesInCStyleCastParentheses: false
66 | SpacesInParentheses: false
67 | SpacesInSquareBrackets: false
68 | Standard: Auto
69 | TabWidth: 4
70 | UseTab: Always
71 |
--------------------------------------------------------------------------------
/src/bytebox.h:
--------------------------------------------------------------------------------
// C interface for bytebox wasm runtime.

// The header targets were stripped from these includes (likely by an
// angle-bracket-eating extraction step). The header requires:
//   <stdint.h>  - int32_t, int64_t, uint32_t
//   <stddef.h>  - size_t
//   <stdbool.h> - bool
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
6 |
7 | struct bb_slice
8 | {
9 | char* data;
10 | size_t length;
11 | };
12 | typedef struct bb_slice bb_slice;
13 |
14 | enum bb_error
15 | {
16 | BB_ERROR_OK,
17 | BB_ERROR_FAILED,
18 | BB_ERROR_OUTOFMEMORY,
19 | BB_ERROR_INVALIDPARAM,
20 | BB_ERROR_UNKNOWNEXPORT,
21 | BB_ERROR_UNKNOWNIMPORT,
22 | BB_ERROR_INCOMPATIBLEIMPORT,
23 | BB_ERROR_TRAP_DEBUG,
24 | BB_ERROR_TRAP_UNREACHABLE,
25 | BB_ERROR_TRAP_INTEGERDIVISIONBYZERO,
26 | BB_ERROR_TRAP_INTEGEROVERFLOW,
27 | BB_ERROR_TRAP_INDIRECTCALLTYPEMISMATCH,
28 | BB_ERROR_TRAP_INVALIDINTEGERCONVERSION,
29 | BB_ERROR_TRAP_OUTOFBOUNDSMEMORYACCESS,
30 | BB_ERROR_TRAP_UNDEFINEDELEMENT,
31 | BB_ERROR_TRAP_UNINITIALIZEDELEMENT,
32 | BB_ERROR_TRAP_OUTOFBOUNDSTABLEACCESS,
33 | BB_ERROR_TRAP_STACKEXHAUSTED,
34 | };
35 | typedef enum bb_error bb_error;
36 |
37 | enum bb_valtype
38 | {
39 | BB_VALTYPE_I32,
40 | BB_VALTYPE_I64,
41 | BB_VALTYPE_F32,
42 | BB_VALTYPE_F64,
43 | };
44 | typedef enum bb_valtype bb_valtype;
45 |
46 | typedef float bb_v128[4];
47 | union bb_val
48 | {
49 | int32_t i32_val;
50 | int64_t i64_val;
51 | float f32_val;
52 | double f64_val;
53 | bb_v128 v128_val;
54 | uint32_t externref_val;
55 | };
56 | typedef union bb_val bb_val;
57 |
58 | struct bb_module_definition_init_opts
59 | {
60 | const char* debug_name;
61 | };
62 | typedef struct bb_module_definition_init_opts bb_module_definition_init_opts;
63 |
64 | typedef struct bb_module_definition bb_module_definition;
65 | typedef struct bb_module_instance bb_module_instance;
66 | typedef struct bb_import_package bb_import_package;
67 |
68 | typedef void bb_host_function(void* userdata, bb_module_instance* module, const bb_val* params, bb_val* returns);
69 | typedef void* bb_wasm_memory_resize(void* mem, size_t new_size_bytes, size_t old_size_bytes, void* userdata);
70 | typedef void bb_wasm_memory_free(void* mem, size_t size_bytes, void* userdata);
71 |
72 | struct bb_wasm_memory_config
73 | {
74 | bb_wasm_memory_resize* resize_callback;
75 | bb_wasm_memory_free* free_callback;
76 | void* userdata;
77 | };
78 | typedef struct bb_wasm_memory_config bb_wasm_memory_config;
79 |
80 | struct bb_module_instance_instantiate_opts
81 | {
82 | bb_import_package** packages;
83 | size_t num_packages;
84 | bb_wasm_memory_config wasm_memory_config;
85 | size_t stack_size;
86 | bool enable_debug;
87 | };
88 | typedef struct bb_module_instance_instantiate_opts bb_module_instance_instantiate_opts;
89 |
90 | struct bb_module_instance_invoke_opts
91 | {
92 | bool trap_on_start;
93 | };
94 | typedef struct bb_module_instance_invoke_opts bb_module_instance_invoke_opts;
95 |
96 | struct bb_func_handle
97 | {
98 | uint32_t index;
99 | uint32_t type;
100 | };
101 | typedef struct bb_func_handle bb_func_handle;
102 |
103 | struct bb_func_info
104 | {
105 | bb_valtype* params;
106 | size_t num_params;
107 | bb_valtype* returns;
108 | size_t num_returns;
109 | };
110 | typedef struct bb_func_info bb_func_info;
111 |
112 | enum bb_global_mut
113 | {
114 | BB_GLOBAL_MUT_IMMUTABLE,
115 | BB_GLOBAL_MUT_MUTABLE,
116 | };
117 | typedef enum bb_global_mut bb_global_mut;
118 |
119 | struct bb_global
120 | {
121 | bb_val* value;
122 | bb_valtype type;
123 | bb_global_mut mut;
124 | };
125 | typedef struct bb_global bb_global;
126 |
127 | enum bb_debug_trace_mode
128 | {
129 | BB_DEBUG_TRACE_NONE,
130 | BB_DEBUG_TRACE_FUNCTION,
131 | BB_DEBUG_TRACE_INSTRUCTION,
132 | };
133 | typedef enum bb_debug_trace_mode bb_debug_trace_mode;
134 |
135 | enum bb_debug_trap_mode
136 | {
137 | BB_DEBUG_TRAP_MODE_DISABLED,
138 | BB_DEBUG_TRAP_MODE_ENABLED,
139 | };
140 | typedef enum bb_debug_trap_mode bb_debug_trap_mode;
141 |
142 | const char* bb_error_str(bb_error err);
143 |
144 | bb_module_definition* bb_module_definition_create(bb_module_definition_init_opts opts);
145 | void bb_module_definition_destroy(bb_module_definition* definition);
146 | bb_error bb_module_definition_decode(bb_module_definition* definition, const char* data, size_t length);
147 | bb_slice bb_module_definition_get_custom_section(const bb_module_definition* definition, const char* name);
148 |
149 | bb_import_package* bb_import_package_init(const char* name);
150 | void bb_import_package_deinit(bb_import_package* package); // only deinit when all module_instances using the package have been destroyed
151 | bb_error bb_import_package_add_function(bb_import_package* package, bb_host_function* func, const char* export_name, const bb_valtype* params, size_t num_params, const bb_valtype* returns, size_t num_returns, void* userdata);
152 | bb_error bb_import_package_add_memory(bb_import_package* package, const bb_wasm_memory_config* config, const char* export_name, uint32_t min_pages, uint32_t max_pages);
153 |
154 | void bb_set_debug_trace_mode(bb_debug_trace_mode mode);
155 |
156 | bb_module_instance* bb_module_instance_create(bb_module_definition* definition);
157 | void bb_module_instance_destroy(bb_module_instance* instance);
158 | bb_error bb_module_instance_instantiate(bb_module_instance* instance, bb_module_instance_instantiate_opts opts);
159 | bb_error bb_module_instance_find_func(bb_module_instance* instance, const char* func_name, bb_func_handle* out_handle);
160 | bb_func_info bb_module_instance_func_info(bb_module_instance* instance, bb_func_handle handle);
161 | bb_error bb_module_instance_invoke(bb_module_instance* instance, bb_func_handle, const bb_val* params, size_t num_params, bb_val* returns, size_t num_returns, bb_module_instance_invoke_opts opts);
162 | bb_error bb_module_instance_resume(bb_module_instance* instance, bb_val* returns, size_t num_returns);
163 | bb_error bb_module_instance_step(bb_module_instance* instance, bb_val* returns, size_t num_returns);
164 | bb_error bb_module_instance_debug_set_trap(bb_module_instance* instance, uint32_t address, bb_debug_trap_mode trap_mode);
165 | void* bb_module_instance_mem(bb_module_instance* instance, size_t offset, size_t length);
166 | bb_slice bb_module_instance_mem_all(bb_module_instance* instance);
167 | bb_error bb_module_instance_mem_grow(bb_module_instance* instance, size_t num_pages);
168 | bb_error bb_module_instance_mem_grow_absolute(bb_module_instance* instance, size_t total_pages);
169 | bb_global bb_module_instance_find_global(bb_module_instance* instance, const char* global_name);
170 |
171 | bool bb_func_handle_isvalid(bb_func_handle handle);
172 |
--------------------------------------------------------------------------------
/src/cffi.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const builtin = @import("builtin");
3 | const AllocError = std.mem.Allocator.Error;
4 |
5 | const core = @import("core.zig");
6 | const ValType = core.ValType;
7 | const Val = core.Val;
8 | const ModuleDefinition = core.ModuleDefinition;
9 | const ModuleInstance = core.ModuleInstance;
10 | const ModuleImportPackage = core.ModuleImportPackage;
11 |
12 | const StableArray = @import("stable-array").StableArray;
13 |
14 | // C interface
15 | const CSlice = extern struct {
16 | data: ?[*]u8,
17 | length: usize,
18 | };
19 |
20 | const CError = enum(c_int) {
21 | Ok,
22 | Failed,
23 | OutOfMemory,
24 | InvalidParameter,
25 | UnknownExport,
26 | UnknownImport,
27 | IncompatibleImport,
28 | TrapDebug,
29 | TrapUnreachable,
30 | TrapIntegerDivisionByZero,
31 | TrapIntegerOverflow,
32 | TrapIndirectCallTypeMismatch,
33 | TrapInvalidIntegerConversion,
34 | TrapOutOfBoundsMemoryAccess,
35 | TrapUndefinedElement,
36 | TrapUninitializedElement,
37 | TrapOutOfBoundsTableAccess,
38 | TrapStackExhausted,
39 | };
40 |
41 | const CModuleDefinitionInitOpts = extern struct {
42 | debug_name: ?[*:0]u8,
43 | };
44 |
45 | const CHostFunction = *const fn (userdata: ?*anyopaque, module: *core.ModuleInstance, params: [*]const Val, returns: [*]Val) void;
46 |
47 | const CWasmMemoryConfig = extern struct {
48 | resize: ?core.WasmMemoryResizeFunction,
49 | free: ?core.WasmMemoryFreeFunction,
50 | userdata: ?*anyopaque,
51 | };
52 |
53 | const CModuleInstanceInstantiateOpts = extern struct {
54 | packages: ?[*]?*const ModuleImportPackage,
55 | num_packages: usize,
56 | wasm_memory_config: CWasmMemoryConfig,
57 | stack_size: usize,
58 | enable_debug: bool,
59 | };
60 |
61 | const CModuleInstanceInvokeOpts = extern struct {
62 | trap_on_start: bool,
63 | };
64 |
65 | const CFuncHandle = extern struct {
66 | index: u32,
67 | type: u32,
68 | };
69 |
70 | const CFuncInfo = extern struct {
71 | params: ?[*]const ValType,
72 | num_params: usize,
73 | returns: ?[*]const ValType,
74 | num_returns: usize,
75 | };
76 |
77 | const CDebugTraceMode = enum(c_int) {
78 | None,
79 | Function,
80 | Instruction,
81 | };
82 |
83 | const CDebugTrapMode = enum(c_int) {
84 | Disabled,
85 | Enabled,
86 | };
87 |
88 | const CGlobalMut = enum(c_int) {
89 | Immutable = 0,
90 | Mutable = 1,
91 | };
92 |
93 | const CGlobalExport = extern struct {
94 | value: ?*Val,
95 | type: ValType,
96 | mut: CGlobalMut,
97 | };
98 |
99 | // TODO logging callback as well?
100 | // TODO allocator hooks
101 | // const CAllocFunc = *const fn (size: usize, userdata: ?*anyopaque) ?*anyopaque;
102 | // const CReallocFunc = *const fn (mem: ?*anyopaque, size: usize, userdata: ?*anyopaque) ?*anyopaque;
103 | // const CFreeFunc = *const fn (mem: ?*anyopaque, userdata: ?*anyopaque) void;
104 |
105 | const INVALID_FUNC_INDEX = std.math.maxInt(u32);
106 |
107 | var cffi_gpa = std.heap.GeneralPurposeAllocator(.{}){};
108 |
109 | // const CAllocator = struct {
110 | // const AllocError = std.mem.Allocator.Error;
111 |
112 | // fallback: FallbackAllocator,
113 | // alloc_func: ?CAllocFunc = null,
114 | // realloc_func: ?CReallocFunc = null,
115 | // free_func: ?CFreeFunc = null,
116 | // userdata: ?*anyopaque = null,
117 |
118 | // fn allocator(self: *CAllocator) std.mem.Allocator() {
119 | // if (alloc_func != null and realloc_func != null and free_func != null) {
120 | // return std.mem.Allocator.init(
121 | // self,
122 | // alloc,
123 | // resize,
124 | // free
125 | // );
126 | // } else {
127 | // return fallback.allocator();
128 | // }
129 | // }
130 |
131 | // fn alloc(ptr: *anyopaque, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) AllocError![]u8 {
132 | // _ = ret_addr;
133 |
134 | // var allocator = @ptrCast(*CAllocator, @alignCast(@alignOf(CAllocator), ptr));
135 | // const size =
136 | // const mem_or_null: ?[*]anyopaque = allocator.alloc_func(size, allocator.userdata);
137 | // if (mem_or_null) |mem| {
138 | // var bytes = @ptrCast([*]u8, @alignCast(1, mem));
139 | // return bytes[0..size];
140 | // } else {
141 | // return AllocError.OutOfMemory;
142 | // }
143 | // }
144 |
145 | // fn resize(ptr: *anyopaque, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize {
146 |
147 | // }
148 |
149 | // fn free(ptr: *anyopaque, buf: []u8, buf_align: u29, ret_addr: usize) void {
150 |
151 | // }
152 | // };
153 |
154 | // var cffi_allocator = CAllocator{ .fallback = FallbackAllocator{} };
155 |
156 | // export fn bb_set_memory_hooks(alloc_func: CAllocFunc, realloc_func: CReallocFunc, free_func: CFreeFunc, userdata: ?*anyopaque) void {
157 | // cffi_allocator.alloc_func = alloc_func;
158 | // cffi_allocator.realloc_func = realloc_func;
159 | // cffi_allocator.free_func = free_func;
160 | // cffi_allocator.userdata = userdata;
161 | // }
162 |
/// C API: returns the constant name of a bb_error code as a NUL-terminated
/// string. The returned strings must stay in sync with the bb_error enum
/// constants declared in bytebox.h.
export fn bb_error_str(c_error: CError) [*:0]const u8 {
    return switch (c_error) {
        .Ok => "BB_ERROR_OK",
        .Failed => "BB_ERROR_FAILED",
        .OutOfMemory => "BB_ERROR_OUTOFMEMORY",
        // Bug fix: previously returned "BB_ERROR_INVALIDPARAMETER", which does
        // not match the header's enum constant BB_ERROR_INVALIDPARAM.
        .InvalidParameter => "BB_ERROR_INVALIDPARAM",
        .UnknownExport => "BB_ERROR_UNKNOWNEXPORT",
        .UnknownImport => "BB_ERROR_UNKNOWNIMPORT",
        .IncompatibleImport => "BB_ERROR_INCOMPATIBLEIMPORT",
        .TrapDebug => "BB_ERROR_TRAP_DEBUG",
        .TrapUnreachable => "BB_ERROR_TRAP_UNREACHABLE",
        .TrapIntegerDivisionByZero => "BB_ERROR_TRAP_INTEGERDIVISIONBYZERO",
        .TrapIntegerOverflow => "BB_ERROR_TRAP_INTEGEROVERFLOW",
        .TrapIndirectCallTypeMismatch => "BB_ERROR_TRAP_INDIRECTCALLTYPEMISMATCH",
        .TrapInvalidIntegerConversion => "BB_ERROR_TRAP_INVALIDINTEGERCONVERSION",
        .TrapOutOfBoundsMemoryAccess => "BB_ERROR_TRAP_OUTOFBOUNDSMEMORYACCESS",
        .TrapUndefinedElement => "BB_ERROR_TRAP_UNDEFINEDELEMENT",
        .TrapUninitializedElement => "BB_ERROR_TRAP_UNINITIALIZEDELEMENT",
        .TrapOutOfBoundsTableAccess => "BB_ERROR_TRAP_OUTOFBOUNDSTABLEACCESS",
        .TrapStackExhausted => "BB_ERROR_TRAP_STACKEXHAUSTED",
    };
}
185 |
/// C API: allocates a new ModuleDefinition. A null debug_name is treated as
/// an empty string. Returns null on allocation failure.
export fn bb_module_definition_create(c_opts: CModuleDefinitionInitOpts) ?*core.ModuleDefinition {
    const name: []const u8 = if (c_opts.debug_name) |n| std.mem.sliceTo(n, 0) else "";
    return core.createModuleDefinition(cffi_gpa.allocator(), .{
        .debug_name = name,
    }) catch null;
}
195 |
/// C API: frees a ModuleDefinition. A null pointer is silently ignored.
export fn bb_module_definition_destroy(module: ?*core.ModuleDefinition) void {
    const m = module orelse return;
    m.destroy();
}
201 |
/// C API: decodes `length` bytes of wasm bytecode into the definition.
/// Returns InvalidParameter when either pointer is null, Failed when decoding
/// raises any error, Ok otherwise.
export fn bb_module_definition_decode(module: ?*core.ModuleDefinition, data: ?[*]u8, length: usize) CError {
    const m = module orelse return CError.InvalidParameter;
    const bytes = data orelse return CError.InvalidParameter;

    m.decode(bytes[0..length]) catch return CError.Failed;
    return CError.Ok;
}
214 |
/// C API: looks up a custom section by name. Returns a {null, 0} slice when
/// any argument is null or the section does not exist.
export fn bb_module_definition_get_custom_section(module: ?*core.ModuleDefinition, name: ?[*:0]const u8) CSlice {
    const empty = CSlice{ .data = null, .length = 0 };

    const m = module orelse return empty;
    const c_name = name orelse return empty;

    const section_data = m.getCustomSection(std.mem.sliceTo(c_name, 0)) orelse return empty;
    return CSlice{
        .data = section_data.ptr,
        .length = section_data.len,
    };
}
231 |
/// C API: allocates a named import package. Returns null when c_name is null
/// or any allocation fails; on init failure the partially-created package is
/// destroyed before returning.
export fn bb_import_package_init(c_name: ?[*:0]const u8) ?*ModuleImportPackage {
    const name_z = c_name orelse return null;
    const allocator = cffi_gpa.allocator();

    const package = allocator.create(ModuleImportPackage) catch return null;
    package.* = ModuleImportPackage.init(std.mem.sliceTo(name_z, 0), null, null, allocator) catch {
        allocator.destroy(package);
        return null;
    };
    return package;
}
250 |
/// C API: deinitializes an import package. A null pointer is ignored. Per the
/// header contract, only call this after all module instances using the
/// package have been destroyed.
export fn bb_import_package_deinit(package: ?*ModuleImportPackage) void {
    const p = package orelse return;
    p.deinit();
}
256 |
/// C API: registers a host function on the package under `c_name`. The param
/// and return type arrays may be null only when their counts are zero.
/// Returns InvalidParameter for bad arguments, OutOfMemory if registration
/// fails to allocate, Ok otherwise.
export fn bb_import_package_add_function(package: ?*ModuleImportPackage, func: ?CHostFunction, c_name: ?[*:0]const u8, c_params: ?[*]ValType, num_params: usize, c_returns: ?[*]ValType, num_returns: usize, userdata: ?*anyopaque) CError {
    const p = package orelse return CError.InvalidParameter;
    const host_func = func orelse return CError.InvalidParameter;
    const name_z = c_name orelse return CError.InvalidParameter;

    if (num_params > 0 and c_params == null) return CError.InvalidParameter;
    if (num_returns > 0 and c_returns == null) return CError.InvalidParameter;

    const param_types: []ValType = if (c_params) |ptr| ptr[0..num_params] else &[_]ValType{};
    const return_types: []ValType = if (c_returns) |ptr| ptr[0..num_returns] else &[_]ValType{};

    p.addHostFunction(std.mem.sliceTo(name_z, 0), param_types, return_types, host_func, userdata) catch {
        return CError.OutOfMemory;
    };
    return CError.Ok;
}
279 |
/// C API: registers a host-backed memory import on the package. Both resize
/// and free callbacks are required. Returns Ok on success, InvalidParameter
/// for null/invalid arguments or a package that already has a memory import,
/// and OutOfMemory on allocation failure.
export fn bb_import_package_add_memory(package: ?*ModuleImportPackage, config: ?*CWasmMemoryConfig, c_name: ?[*:0]const u8, min_pages: u32, max_pages: u32) CError {
    if (package != null and config != null and c_name != null) {
        // Only one memory import per package is supported.
        if ((package.?.memories.items.len > 0)) {
            return CError.InvalidParameter;
        }
        if (config.?.resize == null) {
            return CError.InvalidParameter;
        }
        if (config.?.free == null) {
            return CError.InvalidParameter;
        }

        const name: []const u8 = std.mem.sliceTo(c_name.?, 0);
        const limits = core.Limits{
            .min = min_pages,
            .max = max_pages,
            .limit_type = 1, // NOTE(review): presumably "max present" encoding - confirm against core.Limits
        };

        var allocator: *std.mem.Allocator = &package.?.allocator;

        var mem_instance = allocator.create(core.MemoryInstance) catch return CError.OutOfMemory;

        const wasm_memory_config = core.WasmMemoryExternal{
            .resize_callback = config.?.resize.?,
            .free_callback = config.?.free.?,
            .userdata = config.?.userdata,
        };

        mem_instance.* = core.MemoryInstance.init(limits, wasm_memory_config);
        // Pre-grow to the minimum page count; failure here indicates a broken
        // host resize callback, which is a programming error.
        if (mem_instance.grow(limits.min) == false) {
            unreachable;
        }

        const mem_import = core.MemoryImport{
            .name = name,
            .data = .{ .Host = mem_instance },
        };

        package.?.memories.append(mem_import) catch {
            mem_instance.deinit();
            allocator.destroy(mem_instance);
            return CError.OutOfMemory;
        };

        // Bug fix: previously fell through to the InvalidParameter return
        // below, reporting failure even after successfully adding the memory.
        return CError.Ok;
    }

    return CError.InvalidParameter;
}
328 |
/// C API: sets the global debug trace mode, translating the C enum into the
/// core DebugTrace mode.
export fn bb_set_debug_trace_mode(c_mode: CDebugTraceMode) void {
    const mode: core.DebugTrace.Mode = switch (c_mode) {
        .None => .None,
        .Function => .Function,
        .Instruction => .Instruction,
    };
    // Any result from setMode is intentionally discarded at the C boundary.
    _ = core.DebugTrace.setMode(mode);
}
337 |
/// C API: creates a stack-VM module instance from a definition. Returns null
/// when the definition is null or instance creation fails.
export fn bb_module_instance_create(module_definition: ?*ModuleDefinition) ?*ModuleInstance {
    const def = module_definition orelse return null;
    return core.createModuleInstance(.Stack, def, cffi_gpa.allocator()) catch null;
}
349 |
/// C API: destroys a module instance. A null pointer is silently ignored.
export fn bb_module_instance_destroy(module: ?*ModuleInstance) void {
    const m = module orelse return;
    m.destroy();
}
355 |
/// C API: instantiates the module with the given import packages and optional
/// external wasm-memory callbacks.
///
/// Returns InvalidParameter when `module` or `c_opts.packages` is null (note:
/// packages must be non-null even when num_packages is 0 - TODO confirm this
/// is intended), or when exactly one of the two memory callbacks is set; both
/// must be provided together or not at all. Other failures are translated
/// from the underlying instantiate error via translateError.
export fn bb_module_instance_instantiate(module: ?*ModuleInstance, c_opts: CModuleInstanceInstantiateOpts) CError {
    // Both wasm memory config callbacks must be set or null - partially overriding the behavior isn't valid
    var num_wasm_memory_callbacks: u32 = 0;
    num_wasm_memory_callbacks += if (c_opts.wasm_memory_config.resize != null) 1 else 0;
    num_wasm_memory_callbacks += if (c_opts.wasm_memory_config.free != null) 1 else 0;

    // num_wasm_memory_callbacks != 1 accepts 0 (no override) and 2 (full override).
    if (module != null and c_opts.packages != null and num_wasm_memory_callbacks != 1) {
        const packages: []?*const ModuleImportPackage = c_opts.packages.?[0..c_opts.num_packages];

        const allocator = cffi_gpa.allocator();
        // Copy the non-null packages by value into a dense array, since the C
        // side passes an array of (possibly null) pointers.
        var flat_packages = std.ArrayList(ModuleImportPackage).init(allocator);
        defer flat_packages.deinit();

        flat_packages.ensureTotalCapacityPrecise(packages.len) catch return CError.OutOfMemory;
        for (packages) |p| {
            if (p != null) {
                flat_packages.appendAssumeCapacity(p.?.*);
            }
        }

        var opts = core.ModuleInstantiateOpts{
            .imports = flat_packages.items,
            .stack_size = c_opts.stack_size,
            .enable_debug = c_opts.enable_debug,
        };

        // Only install the external memory hooks when both callbacks are set
        // (num_wasm_memory_callbacks is 0 or 2 at this point).
        if (num_wasm_memory_callbacks > 0) {
            opts.wasm_memory_external = core.WasmMemoryExternal{
                .resize_callback = c_opts.wasm_memory_config.resize.?,
                .free_callback = c_opts.wasm_memory_config.free.?,
                .userdata = c_opts.wasm_memory_config.userdata,
            };
        }

        if (module.?.instantiate(opts)) {
            return CError.Ok;
        } else |err| {
            return translateError(err);
        }
    }

    return CError.InvalidParameter;
}
399 |
/// C API: looks up an exported function by name and writes its handle into
/// `out_handle`. On failure the handle index is set to INVALID_FUNC_INDEX and
/// UnknownExport (or InvalidParameter for null arguments) is returned.
export fn bb_module_instance_find_func(module: ?*ModuleInstance, c_func_name: ?[*:0]const u8, out_handle: ?*CFuncHandle) CError {
    const m = module orelse return CError.InvalidParameter;
    const name_z = c_func_name orelse return CError.InvalidParameter;
    const out = out_handle orelse return CError.InvalidParameter;

    out.index = INVALID_FUNC_INDEX;

    const handle = m.getFunctionHandle(std.mem.sliceTo(name_z, 0)) catch |err| {
        std.debug.assert(err == error.ExportUnknownFunction);
        return CError.UnknownExport;
    };

    out.index = handle.index;
    out.type = @intFromEnum(handle.type);
    return CError.Ok;
}
418 |
/// C API: returns the parameter/return type arrays for a function handle.
/// On any failure (null module, invalid handle index, out-of-range handle
/// type, or unknown function) an all-null/zero CFuncInfo is returned.
export fn bb_module_instance_func_info(module: ?*ModuleInstance, c_func_handle: CFuncHandle) CFuncInfo {
    if (module != null and c_func_handle.index != INVALID_FUNC_INDEX) {
        // intToEnum validates the raw u32 before trusting it as a handle type.
        if (std.meta.intToEnum(core.FunctionHandleType, c_func_handle.type)) |handle_type| {
            const func_handle = core.FunctionHandle{
                .index = c_func_handle.index,
                .type = handle_type,
            };

            const maybe_info: ?core.FunctionExport = module.?.getFunctionInfo(func_handle);
            if (maybe_info) |info| {
                // Empty slices are surfaced as null pointers on the C side.
                return CFuncInfo{
                    .params = if (info.params.len > 0) info.params.ptr else null,
                    .num_params = info.params.len,
                    .returns = if (info.returns.len > 0) info.returns.ptr else null,
                    .num_returns = info.returns.len,
                };
            }
        } else |_| {} // intToEnum failed, user must have passed invalid data
    }

    return CFuncInfo{
        .params = null,
        .num_params = 0,
        .returns = null,
        .num_returns = 0,
    };
}
446 |
export fn bb_module_instance_invoke(module: ?*ModuleInstance, c_handle: CFuncHandle, params: ?[*]const Val, num_params: usize, returns: ?[*]Val, num_returns: usize, opts: CModuleInstanceInvokeOpts) CError {
    // Invoke an exported function previously resolved via bb_module_instance_find_func.
    if (module == null or c_handle.index == INVALID_FUNC_INDEX) {
        return CError.InvalidParameter;
    }

    const handle = core.FunctionHandle{
        .index = c_handle.index,
        .type = @as(core.FunctionHandleType, @enumFromInt(c_handle.type)),
    };

    const invoke_opts = core.InvokeOpts{
        .trap_on_start = opts.trap_on_start,
    };

    // Null C pointers become empty slices so the VM sees a well-formed call.
    const params_slice: []const Val = if (params) |p| p[0..num_params] else &[_]Val{};
    const returns_slice: []Val = if (returns) |r| r[0..num_returns] else &[_]Val{};

    module.?.invoke(handle, params_slice.ptr, returns_slice.ptr, invoke_opts) catch |err| {
        return translateError(err);
    };
    return CError.Ok;
}
470 |
/// Not implemented yet; always reports Failed without touching the module.
export fn bb_module_instance_resume(module: ?*ModuleInstance, returns: ?[*]Val, num_returns: usize) CError {
    _ = .{ module, returns, num_returns };
    return CError.Failed;
}
477 |
/// Not implemented yet; always reports Failed without touching the module.
export fn bb_module_instance_step(module: ?*ModuleInstance, returns: ?[*]Val, num_returns: usize) CError {
    _ = .{ module, returns, num_returns };
    return CError.Failed;
}
484 |
/// Not implemented yet; always reports Failed without touching the module.
export fn bb_module_instance_debug_set_trap(module: ?*ModuleInstance, address: u32, trap_mode: CDebugTrapMode) CError {
    _ = .{ module, address, trap_mode };
    return CError.Failed;
}
491 |
export fn bb_module_instance_mem(module: ?*ModuleInstance, offset: usize, length: usize) ?*anyopaque {
    // Zero-length requests and null modules never yield a pointer.
    if (module == null or length == 0) {
        return null;
    }

    const region = module.?.memorySlice(offset, length);
    if (region.len == 0) {
        return null;
    }
    return region.ptr;
}
500 |
export fn bb_module_instance_mem_all(module: ?*ModuleInstance) CSlice {
    // Expose the module's entire linear memory as a C slice; empty on null module.
    if (module) |m| {
        const all = m.memoryAll();
        return CSlice{
            .data = all.ptr,
            .length = all.len,
        };
    }

    return CSlice{
        .data = null,
        .length = 0,
    };
}
515 |
export fn bb_module_instance_mem_grow(module: ?*ModuleInstance, num_pages: usize) CError {
    // Grow linear memory by a relative page count.
    const m = module orelse return CError.InvalidParameter;
    return if (m.memoryGrow(num_pages)) CError.Ok else CError.Failed;
}
526 |
export fn bb_module_instance_mem_grow_absolute(module: ?*ModuleInstance, total_pages: usize) CError {
    // Grow linear memory to an absolute total page count.
    const m = module orelse return CError.InvalidParameter;
    return if (m.memoryGrowAbsolute(total_pages)) CError.Ok else CError.Failed;
}
537 |
export fn bb_module_instance_find_global(module: ?*ModuleInstance, c_global_name: ?[*:0]const u8) CGlobalExport {
    // The C mutability enum must mirror core.GlobalMut for the cast below to be valid.
    comptime {
        std.debug.assert(@intFromEnum(CGlobalMut.Immutable) == @intFromEnum(core.GlobalMut.Immutable));
        std.debug.assert(@intFromEnum(CGlobalMut.Mutable) == @intFromEnum(core.GlobalMut.Mutable));
    }

    // Returned for bad parameters or an unknown global name.
    const not_found = CGlobalExport{
        .value = null,
        .type = .I32,
        .mut = .Immutable,
    };

    if (module == null or c_global_name == null) {
        return not_found;
    }

    const name = std.mem.sliceTo(c_global_name.?, 0);
    const global = module.?.getGlobalExport(name) catch return not_found;

    return CGlobalExport{
        .value = global.val,
        .type = global.valtype,
        .mut = @as(CGlobalMut, @enumFromInt(@intFromEnum(global.mut))),
    };
}
561 |
export fn bb_func_handle_isvalid(c_handle: CFuncHandle) bool {
    // Handles produced by a failed lookup carry INVALID_FUNC_INDEX.
    const is_invalid = (c_handle.index == INVALID_FUNC_INDEX);
    return !is_invalid;
}
565 |
566 | ///////////////////////////////////////////////////////////////////////////////////////////////////
567 | // Local helpers
568 |
/// Maps instantiation/invocation errors onto the stable C error enum.
/// Anything not explicitly listed collapses to the generic Failed code.
fn translateError(err: anyerror) CError {
    return switch (err) {
        error.OutOfMemory => CError.OutOfMemory,
        error.UnlinkableUnknownImport => CError.UnknownImport,
        error.UnlinkableIncompatibleImportType => CError.IncompatibleImport,
        error.TrapDebug => CError.TrapDebug,
        error.TrapUnreachable => CError.TrapUnreachable,
        error.TrapIntegerDivisionByZero => CError.TrapIntegerDivisionByZero,
        error.TrapIntegerOverflow => CError.TrapIntegerOverflow,
        error.TrapIndirectCallTypeMismatch => CError.TrapIndirectCallTypeMismatch,
        error.TrapInvalidIntegerConversion => CError.TrapInvalidIntegerConversion,
        error.TrapOutOfBoundsMemoryAccess => CError.TrapOutOfBoundsMemoryAccess,
        error.TrapUndefinedElement => CError.TrapUndefinedElement,
        error.TrapUninitializedElement => CError.TrapUninitializedElement,
        error.TrapOutOfBoundsTableAccess => CError.TrapOutOfBoundsTableAccess,
        error.TrapStackExhausted => CError.TrapStackExhausted,
        else => CError.Failed,
    };
}
588 |
589 | ///////////////////////////////////////////////////////////////////////////////////////////////////
590 | // MSVC linking compat
591 |
592 | // NOTE: Zig expects various chkstk functions to be present during linking, which would be fine if
593 | // zig or clang linked this code, but when linking a static lib with the MSVC compiler, the compiler
594 | // runtime has different names for these functions. Here we borrow the compiler_rt stack_probe.zig
595 | // file and adapt it for our uses to ensure we can link with both clang and msvc runtimes.
596 |
// Wires up the stack-probe routines below under the symbol names each Windows
// toolchain expects, plus Zig's own probe symbol on x86/x86_64. Weak linkage
// lets a real CRT implementation win if one is linked in.
comptime {
    if (builtin.os.tag == .windows) {
        const is_mingw = builtin.os.tag == .windows and builtin.abi.isGnu();

        // Default stack-probe functions emitted by LLVM
        if (is_mingw) {
            @export(_chkstk, .{ .name = "_alloca", .linkage = .weak });
            @export(___chkstk_ms, .{ .name = "___chkstk_ms", .linkage = .weak });

            if (builtin.cpu.arch.isAARCH64()) {
                @export(__chkstk, .{ .name = "__chkstk", .linkage = .weak });
            }
        } else if (!builtin.link_libc) {
            // These symbols are otherwise exported by MSVCRT.lib
            @export(_chkstk, .{ .name = "_chkstk", .linkage = .weak });
            @export(__chkstk, .{ .name = "__chkstk", .linkage = .weak });
        }
    }

    // Zig compiler-emitted probe, expected on x86-family targets regardless of OS.
    switch (builtin.cpu.arch) {
        .x86,
        .x86_64,
        => {
            @export(zig_probe_stack, .{ .name = "__zig_probe_stack", .linkage = .weak });
        },
        else => {},
    }
}
625 |
626 | // Zig's own stack-probe routine (available only on x86 and x86_64)
// Zig's own stack-probe routine (available only on x86 and x86_64)
//
// Touches the stack one 4 KiB page at a time for the requested length
// (passed in rax/eax), then restores SP; the probe itself is the `orl $0,...`
// write to each page.
fn zig_probe_stack() callconv(.Naked) void {
    @setRuntimeSafety(false);

    // Versions of the Linux kernel before 5.1 treat any access below SP as
    // invalid so let's update it on the go, otherwise we'll get a segfault
    // instead of triggering the stack growth.

    switch (builtin.cpu.arch) {
        .x86_64 => {
            // %rax = probe length, %rsp = stack pointer
            asm volatile (
                \\ push %%rcx
                \\ mov %%rax, %%rcx
                \\ cmp $0x1000,%%rcx
                \\ jb 2f
                \\ 1:
                \\ sub $0x1000,%%rsp
                \\ orl $0,16(%%rsp)
                \\ sub $0x1000,%%rcx
                \\ cmp $0x1000,%%rcx
                \\ ja 1b
                \\ 2:
                \\ sub %%rcx, %%rsp
                \\ orl $0,16(%%rsp)
                \\ add %%rax,%%rsp
                \\ pop %%rcx
                \\ ret
            );
        },
        .x86 => {
            // %eax = probe length, %esp = stack pointer
            asm volatile (
                \\ push %%ecx
                \\ mov %%eax, %%ecx
                \\ cmp $0x1000,%%ecx
                \\ jb 2f
                \\ 1:
                \\ sub $0x1000,%%esp
                \\ orl $0,8(%%esp)
                \\ sub $0x1000,%%ecx
                \\ cmp $0x1000,%%ecx
                \\ ja 1b
                \\ 2:
                \\ sub %%ecx, %%esp
                \\ orl $0,8(%%esp)
                \\ add %%eax,%%esp
                \\ pop %%ecx
                \\ ret
            );
        },
        else => {},
    }

    // Never reached: each arch branch ends in `ret`, and this naked fn must
    // not fall through to compiler-generated epilogue code.
    unreachable;
}
682 |
// Probes the stack page-by-page WITHOUT adjusting SP (the __chkstk_ms family
// contract). The requested size arrives in rax/eax (x16-scaled x15 on aarch64).
// NOTE(review): not marked callconv(.Naked); it is only ever reached via
// @call(.always_inline, ...) from the naked wrappers below — confirm it is
// never called directly.
fn win_probe_stack_only() void {
    @setRuntimeSafety(false);

    switch (builtin.cpu.arch) {
        .x86_64 => {
            asm volatile (
                \\ push %%rcx
                \\ push %%rax
                \\ cmp $0x1000,%%rax
                \\ lea 24(%%rsp),%%rcx
                \\ jb 1f
                \\ 2:
                \\ sub $0x1000,%%rcx
                \\ test %%rcx,(%%rcx)
                \\ sub $0x1000,%%rax
                \\ cmp $0x1000,%%rax
                \\ ja 2b
                \\ 1:
                \\ sub %%rax,%%rcx
                \\ test %%rcx,(%%rcx)
                \\ pop %%rax
                \\ pop %%rcx
                \\ ret
            );
        },
        .x86 => {
            asm volatile (
                \\ push %%ecx
                \\ push %%eax
                \\ cmp $0x1000,%%eax
                \\ lea 12(%%esp),%%ecx
                \\ jb 1f
                \\ 2:
                \\ sub $0x1000,%%ecx
                \\ test %%ecx,(%%ecx)
                \\ sub $0x1000,%%eax
                \\ cmp $0x1000,%%eax
                \\ ja 2b
                \\ 1:
                \\ sub %%eax,%%ecx
                \\ test %%ecx,(%%ecx)
                \\ pop %%eax
                \\ pop %%ecx
                \\ ret
            );
        },
        else => {},
    }
    if (comptime builtin.cpu.arch.isAARCH64()) {
        // NOTE: page size hardcoded to 4096 for now
        asm volatile (
            \\ lsl x16, x15, #4
            \\ mov x17, sp
            \\1:
            \\
            \\ sub x17, x17, 4096
            \\ subs x16, x16, 4096
            \\ ldr xzr, [x17]
            \\ b.gt 1b
            \\
            \\ ret
        );
    }

    // Never reached: every supported arch path ends in `ret` inside the asm.
    unreachable;
}
749 |
// Probes the stack page-by-page AND moves SP down by the requested amount,
// alloca()-style (the _chkstk/_alloca contract). Size arrives in rax/eax.
// The tail sequence relocates the saved return address/registers to the new
// stack top before returning.
fn win_probe_stack_adjust_sp() void {
    @setRuntimeSafety(false);

    switch (builtin.cpu.arch) {
        .x86_64 => {
            asm volatile (
                \\ push %%rcx
                \\ cmp $0x1000,%%rax
                \\ lea 16(%%rsp),%%rcx
                \\ jb 1f
                \\ 2:
                \\ sub $0x1000,%%rcx
                \\ test %%rcx,(%%rcx)
                \\ sub $0x1000,%%rax
                \\ cmp $0x1000,%%rax
                \\ ja 2b
                \\ 1:
                \\ sub %%rax,%%rcx
                \\ test %%rcx,(%%rcx)
                \\
                \\ lea 8(%%rsp),%%rax
                \\ mov %%rcx,%%rsp
                \\ mov -8(%%rax),%%rcx
                \\ push (%%rax)
                \\ sub %%rsp,%%rax
                \\ ret
            );
        },
        .x86 => {
            asm volatile (
                \\ push %%ecx
                \\ cmp $0x1000,%%eax
                \\ lea 8(%%esp),%%ecx
                \\ jb 1f
                \\ 2:
                \\ sub $0x1000,%%ecx
                \\ test %%ecx,(%%ecx)
                \\ sub $0x1000,%%eax
                \\ cmp $0x1000,%%eax
                \\ ja 2b
                \\ 1:
                \\ sub %%eax,%%ecx
                \\ test %%ecx,(%%ecx)
                \\
                \\ lea 4(%%esp),%%eax
                \\ mov %%ecx,%%esp
                \\ mov -4(%%eax),%%ecx
                \\ push (%%eax)
                \\ sub %%esp,%%eax
                \\ ret
            );
        },
        else => {},
    }

    // Never reached: both arch paths end in `ret` inside the asm.
    unreachable;
}
807 |
808 | // Windows has a multitude of stack-probing functions with similar names and
809 | // slightly different behaviours: some behave as alloca() and update the stack
// pointer after probing the stack, others do not.
811 | //
812 | // Function name | Adjusts the SP? |
813 | // | x86 | x86_64 |
814 | // ----------------------------------------
815 | // _chkstk (_alloca) | yes | yes |
816 | // __chkstk | yes | no |
817 | // __chkstk_ms | no | no |
818 | // ___chkstk (__alloca) | yes | yes |
819 | // ___chkstk_ms | no | no |
820 |
// _chkstk / MinGW _alloca: probes and adjusts SP on both x86 and x86_64
// (see the behavior table above).
fn _chkstk() callconv(.Naked) void {
    @setRuntimeSafety(false);
    @call(.always_inline, win_probe_stack_adjust_sp, .{});
}
// __chkstk: adjusts SP on x86 only; probe-only on x86_64 and aarch64.
fn __chkstk() callconv(.Naked) void {
    @setRuntimeSafety(false);
    if (comptime builtin.cpu.arch.isAARCH64()) {
        @call(.always_inline, win_probe_stack_only, .{});
    } else switch (builtin.cpu.arch) {
        .x86 => @call(.always_inline, win_probe_stack_adjust_sp, .{}),
        .x86_64 => @call(.always_inline, win_probe_stack_only, .{}),
        else => unreachable,
    }
}
// ___chkstk / __alloca: probes and adjusts SP on both architectures.
fn ___chkstk() callconv(.Naked) void {
    @setRuntimeSafety(false);
    @call(.always_inline, win_probe_stack_adjust_sp, .{});
}
// __chkstk_ms: probe-only, never adjusts SP.
fn __chkstk_ms() callconv(.Naked) void {
    @setRuntimeSafety(false);
    @call(.always_inline, win_probe_stack_only, .{});
}
// ___chkstk_ms: probe-only, never adjusts SP.
fn ___chkstk_ms() callconv(.Naked) void {
    @setRuntimeSafety(false);
    @call(.always_inline, win_probe_stack_only, .{});
}
847 |
--------------------------------------------------------------------------------
/src/common.zig:
--------------------------------------------------------------------------------
1 | // Lowest layer of the codebase, that contains types and code used in higher layers
2 |
3 | const std = @import("std");
4 |
5 | pub const StableArray = @import("stable-array").StableArray;
6 |
/// Decodes a LEB128-encoded integer of type T from `reader`.
/// std.leb reports values that don't fit T as error.Overflow; we remap that
/// to MalformedLEB128 so callers can treat it as a malformed module. All
/// other reader errors pass through untouched.
pub fn decodeLEB128(comptime T: type, reader: anytype) !T {
    if (@typeInfo(T).Int.signedness == .signed) {
        return std.leb.readILEB128(T, reader) catch |e| switch (e) {
            error.Overflow => error.MalformedLEB128,
            else => e,
        };
    }
    return std.leb.readULEB128(T, reader) catch |e| switch (e) {
        error.Overflow => error.MalformedLEB128,
        else => e,
    };
}
26 |
/// Log severity. Backed by c_int so the value is stable across the C ABI
/// boundary (this type is re-exported through the C API layer).
pub const LogLevel = enum(c_int) {
    Info,
    Error,
};
31 |
/// Minimal pluggable logger: formats into a fixed stack buffer and hands the
/// NUL-terminated text to an optional callback.
pub const Logger = struct {
    const LogFn = *const fn (level: LogLevel, text: [:0]const u8) void;

    // When null, every message is silently dropped.
    log_fn: ?LogFn,

    /// Logger that writes Info to stdout and Error to stderr.
    pub fn default() Logger {
        return Logger{ .log_fn = &defaultLog };
    }

    /// Logger that discards all output.
    pub fn empty() Logger {
        return Logger{ .log_fn = null };
    }

    fn defaultLog(level: LogLevel, text: [:0]const u8) void {
        var file = if (level == .Error) std.io.getStdErr() else std.io.getStdOut();
        nosuspend file.writer().writeAll(text) catch |e| {
            std.debug.print("Failed logging due to error: {}\n", .{e});
        };
    }

    pub fn info(self: Logger, comptime format: []const u8, args: anytype) void {
        self.log(.Info, format, args);
    }

    pub fn err(self: Logger, comptime format: []const u8, args: anytype) void {
        self.log(.Error, format, args);
    }

    /// Formats `format ++ "\n"` into a 2 KiB stack buffer and forwards it to
    /// the configured callback. Messages that don't fit are reported via
    /// std.debug.print and dropped.
    pub fn log(self: Logger, level: LogLevel, comptime format: []const u8, args: anytype) void {
        const sink = self.log_fn orelse return;
        var buf: [2048]u8 = undefined;
        const text = std.fmt.bufPrintZ(&buf, format ++ "\n", args) catch |e| {
            std.debug.print("Failed logging due to error: {}\n", .{e});
            return;
        };
        sink(level, text);
    }
};
79 |
/// Bump allocator backed by a StableArray: allocations append to the buffer,
/// individual frees are no-ops, and reset() reclaims everything at once.
/// NOTE(review): uses the pre-0.11 std.mem.Allocator.init vtable signature
/// (u29 alignments, len_align) — confirm against the Zig version this file
/// actually builds with.
pub const ScratchAllocator = struct {
    // Backing storage; StableArray keeps item addresses stable as it grows.
    buffer: StableArray(u8),

    const InitOpts = struct {
        // Upper bound for the backing buffer, fixed at init time.
        max_size: usize,
    };

    fn init(opts: InitOpts) ScratchAllocator {
        return ScratchAllocator{
            .buffer = StableArray(u8).init(opts.max_size),
        };
    }

    /// Returns a std.mem.Allocator view over this scratch buffer.
    pub fn allocator(self: *ScratchAllocator) std.mem.Allocator {
        return std.mem.Allocator.init(self, alloc, resize, free);
    }

    /// Discards all outstanding allocations in one shot. Shrinking to zero
    /// cannot fail, hence the `catch unreachable`.
    pub fn reset(self: *ScratchAllocator) void {
        self.buffer.resize(0) catch unreachable;
    }

    // Bump-pointer allocation: align the current end of the buffer up to
    // ptr_align, then extend by `len`.
    fn alloc(
        self: *ScratchAllocator,
        len: usize,
        ptr_align: u29,
        len_align: u29,
        ret_addr: usize,
    ) std.mem.Allocator.Error![]u8 {
        _ = ret_addr;
        _ = len_align;

        const alloc_size = len;
        const offset_begin = std.mem.alignForward(self.buffer.items.len, ptr_align);
        const offset_end = offset_begin + alloc_size;
        self.buffer.resize(offset_end) catch {
            return std.mem.Allocator.Error.OutOfMemory;
        };
        return self.buffer.items[offset_begin..offset_end];
    }

    // In-place resize: only shrinking (or keeping the size) succeeds; growth
    // would require moving past whatever was bumped after this allocation.
    fn resize(
        self: *ScratchAllocator,
        old_mem: []u8,
        old_align: u29,
        new_size: usize,
        len_align: u29,
        ret_addr: usize,
    ) ?usize {
        _ = self;
        _ = old_align;
        _ = ret_addr;

        if (new_size > old_mem.len) {
            return null;
        }
        const aligned_size: usize = if (len_align == 0) new_size else std.mem.alignForward(new_size, len_align);
        return aligned_size;
    }

    // Individual frees are intentionally no-ops; memory is reclaimed in bulk
    // via reset().
    fn free(
        self: *ScratchAllocator,
        old_mem: []u8,
        old_align: u29,
        ret_addr: usize,
    ) void {
        _ = self;
        _ = old_mem;
        _ = old_align;
        _ = ret_addr;
    }
};
151 |
--------------------------------------------------------------------------------
/src/core.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const common = @import("common.zig");
3 | const def = @import("definition.zig");
4 | const inst = @import("instance.zig");
5 | const vm_stack = @import("vm_stack.zig");
6 | const vm_register = @import("vm_register.zig");
7 | pub const config = @import("config");
8 | pub const wasi = @import("wasi.zig");
9 |
10 | pub const LogLevel = common.LogLevel;
11 | pub const Logger = common.Logger;
12 |
13 | pub const i8x16 = def.i8x16;
14 | pub const u8x16 = def.u8x16;
15 | pub const i16x8 = def.i16x8;
16 | pub const u16x8 = def.u16x8;
17 | pub const i32x4 = def.i32x4;
18 | pub const u32x4 = def.u32x4;
19 | pub const i64x2 = def.i64x2;
20 | pub const u64x2 = def.u64x2;
21 | pub const f32x4 = def.f32x4;
22 | pub const f64x2 = def.f64x2;
23 | pub const v128 = def.v128;
24 |
25 | pub const MalformedError = def.MalformedError;
26 | pub const ValidationError = def.ValidationError;
27 |
28 | pub const FunctionExport = def.FunctionExport;
29 | pub const FunctionHandle = def.FunctionHandle;
30 | pub const FunctionHandleType = def.FunctionHandleType;
31 | pub const GlobalDefinition = def.GlobalDefinition;
32 | pub const GlobalMut = def.GlobalMut;
33 | pub const Limits = def.Limits;
34 | pub const ModuleDefinition = def.ModuleDefinition;
35 | pub const ModuleDefinitionOpts = def.ModuleDefinitionOpts;
36 | pub const TaggedVal = def.TaggedVal;
37 | pub const Val = def.Val;
38 | pub const ValType = def.ValType;
39 |
40 | pub const UnlinkableError = inst.UnlinkableError;
41 | pub const UninstantiableError = inst.UninstantiableError;
42 | pub const ExportError = inst.ExportError;
43 | pub const TrapError = inst.TrapError;
44 |
45 | pub const DebugTrace = inst.DebugTrace;
46 | pub const GlobalImport = inst.GlobalImport;
47 | pub const GlobalInstance = inst.GlobalInstance;
48 | pub const MemoryImport = inst.MemoryImport;
49 | pub const MemoryInstance = inst.MemoryInstance;
50 | pub const ModuleImportPackage = inst.ModuleImportPackage;
51 | pub const ModuleInstance = inst.ModuleInstance;
52 | pub const ModuleInstantiateOpts = inst.ModuleInstantiateOpts;
53 | pub const TableImport = inst.TableImport;
54 | pub const TableInstance = inst.TableInstance;
55 | pub const WasmMemoryExternal = inst.WasmMemoryExternal;
56 | pub const WasmMemoryFreeFunction = inst.WasmMemoryFreeFunction;
57 | pub const WasmMemoryResizeFunction = inst.WasmMemoryResizeFunction;
58 | pub const InvokeOpts = inst.InvokeOpts;
59 |
60 | const AllocError = std.mem.Allocator.Error;
61 |
/// Convenience wrapper over ModuleDefinition.create for API symmetry with
/// createModuleInstance.
pub fn createModuleDefinition(allocator: std.mem.Allocator, opts: ModuleDefinitionOpts) AllocError!*ModuleDefinition {
    return ModuleDefinition.create(allocator, opts);
}
65 |
/// Selects which interpreter backend createModuleInstance builds:
/// vm_stack.StackVM or vm_register.RegisterVM.
pub const VmType = enum {
    Stack,
    Register,
};
70 |
/// Allocates the requested VM backend and wraps it in a fresh ModuleInstance.
/// The switch stays expression-form because each arm passes a different
/// comptime VM type to inst.VM.create.
pub fn createModuleInstance(vm_type: VmType, module_def: *const ModuleDefinition, allocator: std.mem.Allocator) AllocError!*ModuleInstance {
    const virtual_machine: *inst.VM = switch (vm_type) {
        .Stack => try inst.VM.create(vm_stack.StackVM, allocator),
        .Register => try inst.VM.create(vm_register.RegisterVM, allocator),
    };
    return ModuleInstance.create(module_def, virtual_machine, allocator);
}
78 |
--------------------------------------------------------------------------------
/src/metering.zig:
--------------------------------------------------------------------------------
1 | const config = @import("config");
2 | const Instruction = @import("definition.zig").Instruction;
3 |
// Build-time switch: metering is compiled in only when the build option
// enable_metering is set.
pub const enabled = config.enable_metering;

// Fuel counter type; collapses to void (zero cost) when metering is disabled.
pub const Meter = if (enabled) usize else void;

// Starting fuel value matching the Meter type above.
pub const initial_meter = if (enabled) 0 else {};

// Error set mixed into VM trap errors; empty when metering is disabled.
pub const MeteringTrapError = if (enabled) error{TrapMeterExceeded} else error{};
11 |
/// Charges one unit of fuel for the given instruction and returns the
/// remaining fuel. Exhausted fuel (0) stays at 0, which also guards the
/// usize subtraction below against underflow.
pub fn reduce(fuel: Meter, instruction: Instruction) Meter {
    if (fuel == 0) {
        return fuel;
    }
    switch (instruction.opcode) {
        // Control-flow and bookkeeping opcodes are free.
        .Invalid, .Unreachable, .DebugTrap, .Noop, .Block, .Loop, .If, .IfNoElse, .Else, .End, .Branch, .Branch_If, .Branch_Table, .Drop => return fuel,
        else => return fuel - 1,
    }
}
21 |
--------------------------------------------------------------------------------
/src/opcode.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const common = @import("common.zig");
3 |
4 | // A compressed version of the wasm opcodes for better table-oriented lookup (no holes). See WasmOpcode for the actual wasm representation.
5 | pub const Opcode = enum(u16) {
6 | Invalid, // Has no corresponding mapping in WasmOpcode.
7 | Unreachable,
8 | DebugTrap, // Has no corresponding mapping in WasmOpcode, intended for use in returning control flow to invoker
9 | Noop,
10 | Block,
11 | Loop,
12 | If,
13 | IfNoElse, // variant of If that assumes no else branch
14 | Else,
15 | End,
16 | Branch,
17 | Branch_If,
18 | Branch_Table,
19 | Return,
20 | Call,
21 | Call_Indirect,
22 | Drop,
23 | Select,
24 | Select_T,
25 | Local_Get,
26 | Local_Set,
27 | Local_Tee,
28 | Global_Get,
29 | Global_Set,
30 | Table_Get,
31 | Table_Set,
32 | I32_Load,
33 | I64_Load,
34 | F32_Load,
35 | F64_Load,
36 | I32_Load8_S,
37 | I32_Load8_U,
38 | I32_Load16_S,
39 | I32_Load16_U,
40 | I64_Load8_S,
41 | I64_Load8_U,
42 | I64_Load16_S,
43 | I64_Load16_U,
44 | I64_Load32_S,
45 | I64_Load32_U,
46 | I32_Store,
47 | I64_Store,
48 | F32_Store,
49 | F64_Store,
50 | I32_Store8,
51 | I32_Store16,
52 | I64_Store8,
53 | I64_Store16,
54 | I64_Store32,
55 | Memory_Size,
56 | Memory_Grow,
57 | I32_Const,
58 | I64_Const,
59 | F32_Const,
60 | F64_Const,
61 | I32_Eqz,
62 | I32_Eq,
63 | I32_NE,
64 | I32_LT_S,
65 | I32_LT_U,
66 | I32_GT_S,
67 | I32_GT_U,
68 | I32_LE_S,
69 | I32_LE_U,
70 | I32_GE_S,
71 | I32_GE_U,
72 | I64_Eqz,
73 | I64_Eq,
74 | I64_NE,
75 | I64_LT_S,
76 | I64_LT_U,
77 | I64_GT_S,
78 | I64_GT_U,
79 | I64_LE_S,
80 | I64_LE_U,
81 | I64_GE_S,
82 | I64_GE_U,
83 | F32_EQ,
84 | F32_NE,
85 | F32_LT,
86 | F32_GT,
87 | F32_LE,
88 | F32_GE,
89 | F64_EQ,
90 | F64_NE,
91 | F64_LT,
92 | F64_GT,
93 | F64_LE,
94 | F64_GE,
95 | I32_Clz,
96 | I32_Ctz,
97 | I32_Popcnt,
98 | I32_Add,
99 | I32_Sub,
100 | I32_Mul,
101 | I32_Div_S,
102 | I32_Div_U,
103 | I32_Rem_S,
104 | I32_Rem_U,
105 | I32_And,
106 | I32_Or,
107 | I32_Xor,
108 | I32_Shl,
109 | I32_Shr_S,
110 | I32_Shr_U,
111 | I32_Rotl,
112 | I32_Rotr,
113 | I64_Clz,
114 | I64_Ctz,
115 | I64_Popcnt,
116 | I64_Add,
117 | I64_Sub,
118 | I64_Mul,
119 | I64_Div_S,
120 | I64_Div_U,
121 | I64_Rem_S,
122 | I64_Rem_U,
123 | I64_And,
124 | I64_Or,
125 | I64_Xor,
126 | I64_Shl,
127 | I64_Shr_S,
128 | I64_Shr_U,
129 | I64_Rotl,
130 | I64_Rotr,
131 | F32_Abs,
132 | F32_Neg,
133 | F32_Ceil,
134 | F32_Floor,
135 | F32_Trunc,
136 | F32_Nearest,
137 | F32_Sqrt,
138 | F32_Add,
139 | F32_Sub,
140 | F32_Mul,
141 | F32_Div,
142 | F32_Min,
143 | F32_Max,
144 | F32_Copysign,
145 | F64_Abs,
146 | F64_Neg,
147 | F64_Ceil,
148 | F64_Floor,
149 | F64_Trunc,
150 | F64_Nearest,
151 | F64_Sqrt,
152 | F64_Add,
153 | F64_Sub,
154 | F64_Mul,
155 | F64_Div,
156 | F64_Min,
157 | F64_Max,
158 | F64_Copysign,
159 | I32_Wrap_I64,
160 | I32_Trunc_F32_S,
161 | I32_Trunc_F32_U,
162 | I32_Trunc_F64_S,
163 | I32_Trunc_F64_U,
164 | I64_Extend_I32_S,
165 | I64_Extend_I32_U,
166 | I64_Trunc_F32_S,
167 | I64_Trunc_F32_U,
168 | I64_Trunc_F64_S,
169 | I64_Trunc_F64_U,
170 | F32_Convert_I32_S,
171 | F32_Convert_I32_U,
172 | F32_Convert_I64_S,
173 | F32_Convert_I64_U,
174 | F32_Demote_F64,
175 | F64_Convert_I32_S,
176 | F64_Convert_I32_U,
177 | F64_Convert_I64_S,
178 | F64_Convert_I64_U,
179 | F64_Promote_F32,
180 | I32_Reinterpret_F32,
181 | I64_Reinterpret_F64,
182 | F32_Reinterpret_I32,
183 | F64_Reinterpret_I64,
184 | I32_Extend8_S,
185 | I32_Extend16_S,
186 | I64_Extend8_S,
187 | I64_Extend16_S,
188 | I64_Extend32_S,
189 | Ref_Null,
190 | Ref_Is_Null,
191 | Ref_Func,
192 | I32_Trunc_Sat_F32_S,
193 | I32_Trunc_Sat_F32_U,
194 | I32_Trunc_Sat_F64_S,
195 | I32_Trunc_Sat_F64_U,
196 | I64_Trunc_Sat_F32_S,
197 | I64_Trunc_Sat_F32_U,
198 | I64_Trunc_Sat_F64_S,
199 | I64_Trunc_Sat_F64_U,
200 | Memory_Init,
201 | Data_Drop,
202 | Memory_Copy,
203 | Memory_Fill,
204 | Table_Init,
205 | Elem_Drop,
206 | Table_Copy,
207 | Table_Grow,
208 | Table_Size,
209 | Table_Fill,
210 | V128_Load,
211 | V128_Load8x8_S,
212 | V128_Load8x8_U,
213 | V128_Load16x4_S,
214 | V128_Load16x4_U,
215 | V128_Load32x2_S,
216 | V128_Load32x2_U,
217 | V128_Load8_Splat,
218 | V128_Load16_Splat,
219 | V128_Load32_Splat,
220 | V128_Load64_Splat,
221 | V128_Store,
222 | V128_Const,
223 | I8x16_Shuffle,
224 | I8x16_Swizzle,
225 | I8x16_Splat,
226 | I16x8_Splat,
227 | I32x4_Splat,
228 | I64x2_Splat,
229 | F32x4_Splat,
230 | F64x2_Splat,
231 | I8x16_Extract_Lane_S,
232 | I8x16_Extract_Lane_U,
233 | I8x16_Replace_Lane,
234 | I16x8_Extract_Lane_S,
235 | I16x8_Extract_Lane_U,
236 | I16x8_Replace_Lane,
237 | I32x4_Extract_Lane,
238 | I32x4_Replace_Lane,
239 | I64x2_Extract_Lane,
240 | I64x2_Replace_Lane,
241 | F32x4_Extract_Lane,
242 | F32x4_Replace_Lane,
243 | F64x2_Extract_Lane,
244 | F64x2_Replace_Lane,
245 | I8x16_EQ,
246 | I8x16_NE,
247 | I8x16_LT_S,
248 | I8x16_LT_U,
249 | I8x16_GT_S,
250 | I8x16_GT_U,
251 | I8x16_LE_S,
252 | I8x16_LE_U,
253 | I8x16_GE_S,
254 | I8x16_GE_U,
255 | I16x8_EQ,
256 | I16x8_NE,
257 | I16x8_LT_S,
258 | I16x8_LT_U,
259 | I16x8_GT_S,
260 | I16x8_GT_U,
261 | I16x8_LE_S,
262 | I16x8_LE_U,
263 | I16x8_GE_S,
264 | I16x8_GE_U,
265 | I32x4_EQ,
266 | I32x4_NE,
267 | I32x4_LT_S,
268 | I32x4_LT_U,
269 | I32x4_GT_S,
270 | I32x4_GT_U,
271 | I32x4_LE_S,
272 | I32x4_LE_U,
273 | I32x4_GE_S,
274 | I32x4_GE_U,
275 | F32x4_EQ,
276 | F32x4_NE,
277 | F32x4_LT,
278 | F32x4_GT,
279 | F32x4_LE,
280 | F32x4_GE,
281 | F64x2_EQ,
282 | F64x2_NE,
283 | F64x2_LT,
284 | F64x2_GT,
285 | F64x2_LE,
286 | F64x2_GE,
287 | V128_Not,
288 | V128_And,
289 | V128_AndNot,
290 | V128_Or,
291 | V128_Xor,
292 | V128_Bitselect,
293 | V128_AnyTrue,
294 | V128_Load8_Lane,
295 | V128_Load16_Lane,
296 | V128_Load32_Lane,
297 | V128_Load64_Lane,
298 | V128_Store8_Lane,
299 | V128_Store16_Lane,
300 | V128_Store32_Lane,
301 | V128_Store64_Lane,
302 | V128_Load32_Zero,
303 | V128_Load64_Zero,
304 | F32x4_Demote_F64x2_Zero,
305 | F64x2_Promote_Low_F32x4,
306 | I8x16_Abs,
307 | I8x16_Neg,
308 | I8x16_Popcnt,
309 | I8x16_AllTrue,
310 | I8x16_Bitmask,
311 | I8x16_Narrow_I16x8_S,
312 | I8x16_Narrow_I16x8_U,
313 | F32x4_Ceil,
314 | F32x4_Floor,
315 | F32x4_Trunc,
316 | F32x4_Nearest,
317 | I8x16_Shl,
318 | I8x16_Shr_S,
319 | I8x16_Shr_U,
320 | I8x16_Add,
321 | I8x16_Add_Sat_S,
322 | I8x16_Add_Sat_U,
323 | I8x16_Sub,
324 | I8x16_Sub_Sat_S,
325 | I8x16_Sub_Sat_U,
326 | F64x2_Ceil,
327 | F64x2_Floor,
328 | I8x16_Min_S,
329 | I8x16_Min_U,
330 | I8x16_Max_S,
331 | I8x16_Max_U,
332 | F64x2_Trunc,
333 | I8x16_Avgr_U,
334 | I16x8_Extadd_Pairwise_I8x16_S,
335 | I16x8_Extadd_Pairwise_I8x16_U,
336 | I32x4_Extadd_Pairwise_I16x8_S,
337 | I32x4_Extadd_Pairwise_I16x8_U,
338 | I16x8_Abs,
339 | I16x8_Neg,
340 | I16x8_Q15mulr_Sat_S,
341 | I16x8_AllTrue,
342 | I16x8_Bitmask,
343 | I16x8_Narrow_I32x4_S,
344 | I16x8_Narrow_I32x4_U,
345 | I16x8_Extend_Low_I8x16_S,
346 | I16x8_Extend_High_I8x16_S,
347 | I16x8_Extend_Low_I8x16_U,
348 | I16x8_Extend_High_I8x16_U,
349 | I16x8_Shl,
350 | I16x8_Shr_S,
351 | I16x8_Shr_U,
352 | I16x8_Add,
353 | I16x8_Add_Sat_S,
354 | I16x8_Add_Sat_U,
355 | I16x8_Sub,
356 | I16x8_Sub_Sat_S,
357 | I16x8_Sub_Sat_U,
358 | F64x2_Nearest,
359 | I16x8_Mul,
360 | I16x8_Min_S,
361 | I16x8_Min_U,
362 | I16x8_Max_S,
363 | I16x8_Max_U,
364 | I16x8_Avgr_U,
365 | I16x8_Extmul_Low_I8x16_S,
366 | I16x8_Extmul_High_I8x16_S,
367 | I16x8_Extmul_Low_I8x16_U,
368 | I16x8_Extmul_High_I8x16_U,
369 | I32x4_Abs,
370 | I32x4_Neg,
371 | I32x4_AllTrue,
372 | I32x4_Bitmask,
373 | I32x4_Extend_Low_I16x8_S,
374 | I32x4_Extend_High_I16x8_S,
375 | I32x4_Extend_Low_I16x8_U,
376 | I32x4_Extend_High_I16x8_U,
377 | I32x4_Shl,
378 | I32x4_Shr_S,
379 | I32x4_Shr_U,
380 | I32x4_Add,
381 | I32x4_Sub,
382 | I32x4_Mul,
383 | I32x4_Min_S,
384 | I32x4_Min_U,
385 | I32x4_Max_S,
386 | I32x4_Max_U,
387 | I32x4_Dot_I16x8_S,
388 | I32x4_Extmul_Low_I16x8_S,
389 | I32x4_Extmul_High_I16x8_S,
390 | I32x4_Extmul_Low_I16x8_U,
391 | I32x4_Extmul_High_I16x8_U,
392 | I64x2_Abs,
393 | I64x2_Neg,
394 | I64x2_AllTrue,
395 | I64x2_Bitmask,
396 | I64x2_Extend_Low_I32x4_S,
397 | I64x2_Extend_High_I32x4_S,
398 | I64x2_Extend_Low_I32x4_U,
399 | I64x2_Extend_High_I32x4_U,
400 | I64x2_Shl,
401 | I64x2_Shr_S,
402 | I64x2_Shr_U,
403 | I64x2_Add,
404 | I64x2_Sub,
405 | I64x2_Mul,
406 | I64x2_EQ,
407 | I64x2_NE,
408 | I64x2_LT_S,
409 | I64x2_GT_S,
410 | I64x2_LE_S,
411 | I64x2_GE_S,
412 | I64x2_Extmul_Low_I32x4_S,
413 | I64x2_Extmul_High_I32x4_S,
414 | I64x2_Extmul_Low_I32x4_U,
415 | I64x2_Extmul_High_I32x4_U,
416 | F32x4_Abs,
417 | F32x4_Neg,
418 | F32x4_Sqrt,
419 | F32x4_Add,
420 | F32x4_Sub,
421 | F32x4_Mul,
422 | F32x4_Div,
423 | F32x4_Min,
424 | F32x4_Max,
425 | F32x4_PMin,
426 | F32x4_PMax,
427 | F64x2_Abs,
428 | F64x2_Neg,
429 | F64x2_Sqrt,
430 | F64x2_Add,
431 | F64x2_Sub,
432 | F64x2_Mul,
433 | F64x2_Div,
434 | F64x2_Min,
435 | F64x2_Max,
436 | F64x2_PMin,
437 | F64x2_PMax,
438 | F32x4_Trunc_Sat_F32x4_S,
439 | F32x4_Trunc_Sat_F32x4_U,
440 | F32x4_Convert_I32x4_S,
441 | F32x4_Convert_I32x4_U,
442 | I32x4_Trunc_Sat_F64x2_S_Zero,
443 | I32x4_Trunc_Sat_F64x2_U_Zero,
444 | F64x2_Convert_Low_I32x4_S,
445 | F64x2_Convert_Low_I32x4_U,
446 |
447 | pub fn beginsBlock(opcode: Opcode) bool {
448 | return switch (opcode) {
449 | .Block => true,
450 | .Loop => true,
451 | .If => true,
452 | else => false,
453 | };
454 | }
455 |
456 | pub fn isIf(opcode: Opcode) bool {
457 | return switch (opcode) {
458 | .If, .IfNoElse => true,
459 | else => false,
460 | };
461 | }
462 | };
463 |
/// Raw WebAssembly opcodes exactly as encoded in the binary format.
///
/// Single-byte opcodes use their wire value directly (0x00-0xD2). Multi-byte
/// opcodes are packed into the u16 as (prefix << 8) | sub-opcode: the 0xFC
/// prefix family (saturating truncation + bulk memory/table ops) occupies
/// 0xFC00-0xFC11, and the 0xFD SIMD family occupies 0xFD00-0xFDFF. Gaps in
/// the numbering (e.g. 0xFD9A) are values the wasm spec does not assign.
pub const WasmOpcode = enum(u16) {
    Unreachable = 0x00,
    Noop = 0x01,
    Block = 0x02,
    Loop = 0x03,
    If = 0x04,
    Else = 0x05,
    End = 0x0B,
    Branch = 0x0C,
    Branch_If = 0x0D,
    Branch_Table = 0x0E,
    Return = 0x0F,
    Call = 0x10,
    Call_Indirect = 0x11,
    Drop = 0x1A,
    Select = 0x1B,
    Select_T = 0x1C,
    Local_Get = 0x20,
    Local_Set = 0x21,
    Local_Tee = 0x22,
    Global_Get = 0x23,
    Global_Set = 0x24,
    Table_Get = 0x25,
    Table_Set = 0x26,
    I32_Load = 0x28,
    I64_Load = 0x29,
    F32_Load = 0x2A,
    F64_Load = 0x2B,
    I32_Load8_S = 0x2C,
    I32_Load8_U = 0x2D,
    I32_Load16_S = 0x2E,
    I32_Load16_U = 0x2F,
    I64_Load8_S = 0x30,
    I64_Load8_U = 0x31,
    I64_Load16_S = 0x32,
    I64_Load16_U = 0x33,
    I64_Load32_S = 0x34,
    I64_Load32_U = 0x35,
    I32_Store = 0x36,
    I64_Store = 0x37,
    F32_Store = 0x38,
    F64_Store = 0x39,
    I32_Store8 = 0x3A,
    I32_Store16 = 0x3B,
    I64_Store8 = 0x3C,
    I64_Store16 = 0x3D,
    I64_Store32 = 0x3E,
    Memory_Size = 0x3F,
    Memory_Grow = 0x40,
    I32_Const = 0x41,
    I64_Const = 0x42,
    F32_Const = 0x43,
    F64_Const = 0x44,
    I32_Eqz = 0x45,
    I32_Eq = 0x46,
    I32_NE = 0x47,
    I32_LT_S = 0x48,
    I32_LT_U = 0x49,
    I32_GT_S = 0x4A,
    I32_GT_U = 0x4B,
    I32_LE_S = 0x4C,
    I32_LE_U = 0x4D,
    I32_GE_S = 0x4E,
    I32_GE_U = 0x4F,
    I64_Eqz = 0x50,
    I64_Eq = 0x51,
    I64_NE = 0x52,
    I64_LT_S = 0x53,
    I64_LT_U = 0x54,
    I64_GT_S = 0x55,
    I64_GT_U = 0x56,
    I64_LE_S = 0x57,
    I64_LE_U = 0x58,
    I64_GE_S = 0x59,
    I64_GE_U = 0x5A,
    F32_EQ = 0x5B,
    F32_NE = 0x5C,
    F32_LT = 0x5D,
    F32_GT = 0x5E,
    F32_LE = 0x5F,
    F32_GE = 0x60,
    F64_EQ = 0x61,
    F64_NE = 0x62,
    F64_LT = 0x63,
    F64_GT = 0x64,
    F64_LE = 0x65,
    F64_GE = 0x66,
    I32_Clz = 0x67,
    I32_Ctz = 0x68,
    I32_Popcnt = 0x69,
    I32_Add = 0x6A,
    I32_Sub = 0x6B,
    I32_Mul = 0x6C,
    I32_Div_S = 0x6D,
    I32_Div_U = 0x6E,
    I32_Rem_S = 0x6F,
    I32_Rem_U = 0x70,
    I32_And = 0x71,
    I32_Or = 0x72,
    I32_Xor = 0x73,
    I32_Shl = 0x74,
    I32_Shr_S = 0x75,
    I32_Shr_U = 0x76,
    I32_Rotl = 0x77,
    I32_Rotr = 0x78,
    I64_Clz = 0x79,
    I64_Ctz = 0x7A,
    I64_Popcnt = 0x7B,
    I64_Add = 0x7C,
    I64_Sub = 0x7D,
    I64_Mul = 0x7E,
    I64_Div_S = 0x7F,
    I64_Div_U = 0x80,
    I64_Rem_S = 0x81,
    I64_Rem_U = 0x82,
    I64_And = 0x83,
    I64_Or = 0x84,
    I64_Xor = 0x85,
    I64_Shl = 0x86,
    I64_Shr_S = 0x87,
    I64_Shr_U = 0x88,
    I64_Rotl = 0x89,
    I64_Rotr = 0x8A,
    F32_Abs = 0x8B,
    F32_Neg = 0x8C,
    F32_Ceil = 0x8D,
    F32_Floor = 0x8E,
    F32_Trunc = 0x8F,
    F32_Nearest = 0x90,
    F32_Sqrt = 0x91,
    F32_Add = 0x92,
    F32_Sub = 0x93,
    F32_Mul = 0x94,
    F32_Div = 0x95,
    F32_Min = 0x96,
    F32_Max = 0x97,
    F32_Copysign = 0x98,
    F64_Abs = 0x99,
    F64_Neg = 0x9A,
    F64_Ceil = 0x9B,
    F64_Floor = 0x9C,
    F64_Trunc = 0x9D,
    F64_Nearest = 0x9E,
    F64_Sqrt = 0x9F,
    F64_Add = 0xA0,
    F64_Sub = 0xA1,
    F64_Mul = 0xA2,
    F64_Div = 0xA3,
    F64_Min = 0xA4,
    F64_Max = 0xA5,
    F64_Copysign = 0xA6,
    I32_Wrap_I64 = 0xA7,
    I32_Trunc_F32_S = 0xA8,
    I32_Trunc_F32_U = 0xA9,
    I32_Trunc_F64_S = 0xAA,
    I32_Trunc_F64_U = 0xAB,
    I64_Extend_I32_S = 0xAC,
    I64_Extend_I32_U = 0xAD,
    I64_Trunc_F32_S = 0xAE,
    I64_Trunc_F32_U = 0xAF,
    I64_Trunc_F64_S = 0xB0,
    I64_Trunc_F64_U = 0xB1,
    F32_Convert_I32_S = 0xB2,
    F32_Convert_I32_U = 0xB3,
    F32_Convert_I64_S = 0xB4,
    F32_Convert_I64_U = 0xB5,
    F32_Demote_F64 = 0xB6,
    F64_Convert_I32_S = 0xB7,
    F64_Convert_I32_U = 0xB8,
    F64_Convert_I64_S = 0xB9,
    F64_Convert_I64_U = 0xBA,
    F64_Promote_F32 = 0xBB,
    I32_Reinterpret_F32 = 0xBC,
    I64_Reinterpret_F64 = 0xBD,
    F32_Reinterpret_I32 = 0xBE,
    F64_Reinterpret_I64 = 0xBF,
    I32_Extend8_S = 0xC0,
    I32_Extend16_S = 0xC1,
    I64_Extend8_S = 0xC2,
    I64_Extend16_S = 0xC3,
    I64_Extend32_S = 0xC4,
    Ref_Null = 0xD0,
    Ref_Is_Null = 0xD1,
    Ref_Func = 0xD2,
    // 0xFC-prefixed opcodes: saturating truncation + bulk memory/table ops.
    I32_Trunc_Sat_F32_S = 0xFC00,
    I32_Trunc_Sat_F32_U = 0xFC01,
    I32_Trunc_Sat_F64_S = 0xFC02,
    I32_Trunc_Sat_F64_U = 0xFC03,
    I64_Trunc_Sat_F32_S = 0xFC04,
    I64_Trunc_Sat_F32_U = 0xFC05,
    I64_Trunc_Sat_F64_S = 0xFC06,
    I64_Trunc_Sat_F64_U = 0xFC07,
    Memory_Init = 0xFC08,
    Data_Drop = 0xFC09,
    Memory_Copy = 0xFC0A,
    Memory_Fill = 0xFC0B,
    Table_Init = 0xFC0C,
    Elem_Drop = 0xFC0D,
    Table_Copy = 0xFC0E,
    Table_Grow = 0xFC0F,
    Table_Size = 0xFC10,
    Table_Fill = 0xFC11,
    // 0xFD-prefixed opcodes: 128-bit SIMD.
    V128_Load = 0xFD00,
    V128_Load8x8_S = 0xFD01,
    V128_Load8x8_U = 0xFD02,
    V128_Load16x4_S = 0xFD03,
    V128_Load16x4_U = 0xFD04,
    V128_Load32x2_S = 0xFD05,
    V128_Load32x2_U = 0xFD06,
    V128_Load8_Splat = 0xFD07,
    V128_Load16_Splat = 0xFD08,
    V128_Load32_Splat = 0xFD09,
    V128_Load64_Splat = 0xFD0A,
    V128_Store = 0xFD0B,
    V128_Const = 0xFD0C,
    I8x16_Shuffle = 0xFD0D,
    I8x16_Swizzle = 0xFD0E,
    I8x16_Splat = 0xFD0F,
    I16x8_Splat = 0xFD10,
    I32x4_Splat = 0xFD11,
    I64x2_Splat = 0xFD12,
    F32x4_Splat = 0xFD13,
    F64x2_Splat = 0xFD14,
    I8x16_Extract_Lane_S = 0xFD15,
    I8x16_Extract_Lane_U = 0xFD16,
    I8x16_Replace_Lane = 0xFD17,
    I16x8_Extract_Lane_S = 0xFD18,
    I16x8_Extract_Lane_U = 0xFD19,
    I16x8_Replace_Lane = 0xFD1A,
    I32x4_Extract_Lane = 0xFD1B,
    I32x4_Replace_Lane = 0xFD1C,
    I64x2_Extract_Lane = 0xFD1D,
    I64x2_Replace_Lane = 0xFD1E,
    F32x4_Extract_Lane = 0xFD1F,
    F32x4_Replace_Lane = 0xFD20,
    F64x2_Extract_Lane = 0xFD21,
    F64x2_Replace_Lane = 0xFD22,
    I8x16_EQ = 0xFD23,
    I8x16_NE = 0xFD24,
    I8x16_LT_S = 0xFD25,
    I8x16_LT_U = 0xFD26,
    I8x16_GT_S = 0xFD27,
    I8x16_GT_U = 0xFD28,
    I8x16_LE_S = 0xFD29,
    I8x16_LE_U = 0xFD2A,
    I8x16_GE_S = 0xFD2B,
    I8x16_GE_U = 0xFD2C,
    I16x8_EQ = 0xFD2D,
    I16x8_NE = 0xFD2E,
    I16x8_LT_S = 0xFD2F,
    I16x8_LT_U = 0xFD30,
    I16x8_GT_S = 0xFD31,
    I16x8_GT_U = 0xFD32,
    I16x8_LE_S = 0xFD33,
    I16x8_LE_U = 0xFD34,
    I16x8_GE_S = 0xFD35,
    I16x8_GE_U = 0xFD36,
    I32x4_EQ = 0xFD37,
    I32x4_NE = 0xFD38,
    I32x4_LT_S = 0xFD39,
    I32x4_LT_U = 0xFD3A,
    I32x4_GT_S = 0xFD3B,
    I32x4_GT_U = 0xFD3C,
    I32x4_LE_S = 0xFD3D,
    I32x4_LE_U = 0xFD3E,
    I32x4_GE_S = 0xFD3F,
    I32x4_GE_U = 0xFD40,
    F32x4_EQ = 0xFD41,
    F32x4_NE = 0xFD42,
    F32x4_LT = 0xFD43,
    F32x4_GT = 0xFD44,
    F32x4_LE = 0xFD45,
    F32x4_GE = 0xFD46,
    F64x2_EQ = 0xFD47,
    F64x2_NE = 0xFD48,
    F64x2_LT = 0xFD49,
    F64x2_GT = 0xFD4A,
    F64x2_LE = 0xFD4B,
    F64x2_GE = 0xFD4C,
    V128_Not = 0xFD4D,
    V128_And = 0xFD4E,
    V128_AndNot = 0xFD4F,
    V128_Or = 0xFD50,
    V128_Xor = 0xFD51,
    V128_Bitselect = 0xFD52,
    V128_AnyTrue = 0xFD53,
    V128_Load8_Lane = 0xFD54,
    V128_Load16_Lane = 0xFD55,
    V128_Load32_Lane = 0xFD56,
    V128_Load64_Lane = 0xFD57,
    V128_Store8_Lane = 0xFD58,
    V128_Store16_Lane = 0xFD59,
    V128_Store32_Lane = 0xFD5A,
    V128_Store64_Lane = 0xFD5B,
    V128_Load32_Zero = 0xFD5C,
    V128_Load64_Zero = 0xFD5D,
    F32x4_Demote_F64x2_Zero = 0xFD5E,
    F64x2_Promote_Low_F32x4 = 0xFD5F,
    I8x16_Abs = 0xFD60,
    I8x16_Neg = 0xFD61,
    I8x16_Popcnt = 0xFD62,
    I8x16_AllTrue = 0xFD63,
    I8x16_Bitmask = 0xFD64,
    // NOTE(review): the leading "II" below looks like a typo for "I8x16_Narrow_..."
    // (the internal Opcode enum uses the single-I spelling). Renaming would break
    // any external references to WasmOpcode, so the spelling is left untouched.
    II8x16_Narrow_I16x8_S = 0xFD65,
    II8x16_Narrow_I16x8_U = 0xFD66,
    F32x4_Ceil = 0xFD67,
    F32x4_Floor = 0xFD68,
    F32x4_Trunc = 0xFD69,
    F32x4_Nearest = 0xFD6A,
    I8x16_Shl = 0xFD6B,
    I8x16_Shr_S = 0xFD6C,
    I8x16_Shr_U = 0xFD6D,
    I8x16_Add = 0xFD6E,
    I8x16_Add_Sat_S = 0xFD6F,
    I8x16_Add_Sat_U = 0xFD70,
    I8x16_Sub = 0xFD71,
    I8x16_Sub_Sat_S = 0xFD72,
    I8x16_Sub_Sat_U = 0xFD73,
    F64x2_Ceil = 0xFD74,
    F64x2_Floor = 0xFD75,
    I8x16_Min_S = 0xFD76,
    I8x16_Min_U = 0xFD77,
    I8x16_Max_S = 0xFD78,
    I8x16_Max_U = 0xFD79,
    F64x2_Trunc = 0xFD7A,
    I8x16_Avgr_U = 0xFD7B,
    I16x8_Extadd_Pairwise_I8x16_S = 0xFD7C,
    I16x8_Extadd_Pairwise_I8x16_U = 0xFD7D,
    I32x4_Extadd_Pairwise_I16x8_S = 0xFD7E,
    I32x4_Extadd_Pairwise_I16x8_U = 0xFD7F,
    I16x8_Abs = 0xFD80,
    I16x8_Neg = 0xFD81,
    I16x8_Q15mulr_Sat_S = 0xFD82,
    I16x8_AllTrue = 0xFD83,
    I16x8_Bitmask = 0xFD84,
    I16x8_Narrow_I32x4_S = 0xFD85,
    I16x8_Narrow_I32x4_U = 0xFD86,
    I16x8_Extend_Low_I8x16_S = 0xFD87,
    I16x8_Extend_High_I8x16_S = 0xFD88,
    I16x8_Extend_Low_I8x16_U = 0xFD89,
    I16x8_Extend_High_I8x16_U = 0xFD8A,
    I16x8_Shl = 0xFD8B,
    I16x8_Shr_S = 0xFD8C,
    I16x8_Shr_U = 0xFD8D,
    I16x8_Add = 0xFD8E,
    I16x8_Add_Sat_S = 0xFD8F,
    I16x8_Add_Sat_U = 0xFD90,
    I16x8_Sub = 0xFD91,
    I16x8_Sub_Sat_S = 0xFD92,
    I16x8_Sub_Sat_U = 0xFD93,
    F64x2_Nearest = 0xFD94,
    I16x8_Mul = 0xFD95,
    I16x8_Min_S = 0xFD96,
    I16x8_Min_U = 0xFD97,
    I16x8_Max_S = 0xFD98,
    I16x8_Max_U = 0xFD99,
    I16x8_Avgr_U = 0xFD9B,
    I16x8_Extmul_Low_I8x16_S = 0xFD9C,
    I16x8_Extmul_High_I8x16_S = 0xFD9D,
    I16x8_Extmul_Low_I8x16_U = 0xFD9E,
    I16x8_Extmul_High_I8x16_U = 0xFD9F,
    I32x4_Abs = 0xFDA0,
    I32x4_Neg = 0xFDA1,
    I32x4_AllTrue = 0xFDA3,
    I32x4_Bitmask = 0xFDA4,
    I32x4_Extend_Low_I16x8_S = 0xFDA7,
    I32x4_Extend_High_I16x8_S = 0xFDA8,
    I32x4_Extend_Low_I16x8_U = 0xFDA9,
    I32x4_Extend_High_I16x8_U = 0xFDAA,
    I32x4_Shl = 0xFDAB,
    I32x4_Shr_S = 0xFDAC,
    I32x4_Shr_U = 0xFDAD,
    I32x4_Add = 0xFDAE,
    I32x4_Sub = 0xFDB1,
    I32x4_Mul = 0xFDB5,
    I32x4_Min_S = 0xFDB6,
    I32x4_Min_U = 0xFDB7,
    I32x4_Max_S = 0xFDB8,
    I32x4_Max_U = 0xFDB9,
    I32x4_Dot_I16x8_S = 0xFDBA,
    I32x4_Extmul_Low_I16x8_S = 0xFDBC,
    I32x4_Extmul_High_I16x8_S = 0xFDBD,
    I32x4_Extmul_Low_I16x8_U = 0xFDBE,
    I32x4_Extmul_High_I16x8_U = 0xFDBF,
    I64x2_Abs = 0xFDC0,
    I64x2_Neg = 0xFDC1,
    I64x2_AllTrue = 0xFDC3,
    I64x2_Bitmask = 0xFDC4,
    I64x2_Extend_Low_I32x4_S = 0xFDC7,
    I64x2_Extend_High_I32x4_S = 0xFDC8,
    I64x2_Extend_Low_I32x4_U = 0xFDC9,
    I64x2_Extend_High_I32x4_U = 0xFDCA,
    I64x2_Shl = 0xFDCB,
    I64x2_Shr_S = 0xFDCC,
    I64x2_Shr_U = 0xFDCD,
    I64x2_Add = 0xFDCE,
    I64x2_Sub = 0xFDD1,
    I64x2_Mul = 0xFDD5,
    I64x2_EQ = 0xFDD6,
    I64x2_NE = 0xFDD7,
    I64x2_LT_S = 0xFDD8,
    I64x2_GT_S = 0xFDD9,
    I64x2_LE_S = 0xFDDA,
    I64x2_GE_S = 0xFDDB,
    I64x2_Extmul_Low_I32x4_S = 0xFDDC,
    I64x2_Extmul_High_I32x4_S = 0xFDDD,
    I64x2_Extmul_Low_I32x4_U = 0xFDDE,
    I64x2_Extmul_High_I32x4_U = 0xFDDF,
    F32x4_Abs = 0xFDE0,
    F32x4_Neg = 0xFDE1,
    F32x4_Sqrt = 0xFDE3,
    F32x4_Add = 0xFDE4,
    F32x4_Sub = 0xFDE5,
    F32x4_Mul = 0xFDE6,
    F32x4_Div = 0xFDE7,
    F32x4_Min = 0xFDE8,
    F32x4_Max = 0xFDE9,
    F32x4_PMin = 0xFDEA,
    F32x4_PMax = 0xFDEB,
    F64x2_Abs = 0xFDEC,
    F64x2_Neg = 0xFDED,
    F64x2_Sqrt = 0xFDEF,
    F64x2_Add = 0xFDF0,
    F64x2_Sub = 0xFDF1,
    F64x2_Mul = 0xFDF2,
    F64x2_Div = 0xFDF3,
    F64x2_Min = 0xFDF4,
    F64x2_Max = 0xFDF5,
    F64x2_PMin = 0xFDF6,
    F64x2_PMax = 0xFDF7,
    F32x4_Trunc_Sat_F32x4_S = 0xFDF8,
    F32x4_Trunc_Sat_F32x4_U = 0xFDF9,
    F32x4_Convert_I32x4_S = 0xFDFA,
    F32x4_Convert_I32x4_U = 0xFDFB,
    I32x4_Trunc_Sat_F64x2_S_Zero = 0xFDFC,
    I32x4_Trunc_Sat_F64x2_U_Zero = 0xFDFD,
    F64x2_Convert_Low_I32x4_S = 0xFDFE,
    F64x2_Convert_Low_I32x4_U = 0xFDFF,

    /// Translates a wire-format opcode into the VM's internal `Opcode` via the
    /// positional tables in `ConversionTables`. Because `wasm` is always a valid
    /// enum member, the table index is guaranteed in-bounds for each branch.
    /// Asserts (debug builds) that the table entry is not `.Invalid`.
    pub fn toOpcode(wasm: WasmOpcode) Opcode {
        const opcode_int = @intFromEnum(wasm);
        var opcode: Opcode = undefined;
        if (opcode_int < ConversionTables.wasmOpcodeToOpcodeTable.len) {
            // Single-byte opcodes index the main table directly.
            opcode = ConversionTables.wasmOpcodeToOpcodeTable[opcode_int];
        } else if (opcode_int >= 0xFC00 and opcode_int < 0xFCD0) {
            opcode = ConversionTables.wasmFCOpcodeToOpcodeTable[opcode_int - 0xFC00];
        } else {
            // Remaining values are the 0xFD SIMD family (0xFD00-0xFDFF).
            opcode = ConversionTables.wasmFDOpcodeToOpcodeTable[opcode_int - 0xFD00];
        }
        std.debug.assert(opcode != .Invalid);
        return opcode;
    }

    /// Reads one opcode from `reader`. A leading 0xFC or 0xFD byte marks a
    /// multi-byte opcode whose sub-opcode follows as a LEB128 u32; all valid
    /// sub-opcodes fit in a u8, so larger values are rejected. Returns
    /// `error.MalformedIllegalOpcode` for any encoding that does not map to a
    /// defined `WasmOpcode` member.
    pub fn decode(reader: anytype) !WasmOpcode {
        const byte = try reader.readByte();
        var wasm_op: WasmOpcode = undefined;
        if (byte == 0xFC or byte == 0xFD) {
            const type_opcode = try common.decodeLEB128(u32, reader);
            if (type_opcode > std.math.maxInt(u8)) {
                return error.MalformedIllegalOpcode;
            }
            const byte2 = @as(u8, @intCast(type_opcode));
            // Pack prefix and sub-opcode into the u16 enum representation.
            var extended: u16 = byte;
            extended = extended << 8;
            extended |= byte2;

            wasm_op = std.meta.intToEnum(WasmOpcode, extended) catch {
                return error.MalformedIllegalOpcode;
            };
        } else {
            wasm_op = std.meta.intToEnum(WasmOpcode, byte) catch {
                return error.MalformedIllegalOpcode;
            };
        }
        return wasm_op;
    }
};
941 |
/// Positional lookup tables mapping wire-format `WasmOpcode` values to the
/// VM's internal `Opcode` enum. Each table is indexed by the opcode value
/// (minus the prefix base for the 0xFC/0xFD families), so entry ORDER is
/// load-bearing: unassigned encodings must hold `Opcode.Invalid` placeholders.
const ConversionTables = struct {
    /// Single-byte opcodes 0x00-0xD2, indexed directly by opcode value.
    const wasmOpcodeToOpcodeTable = [_]Opcode{
        Opcode.Unreachable, // 0x00
        Opcode.Noop, // 0x01
        Opcode.Block, // 0x02
        Opcode.Loop, // 0x03
        Opcode.If, // 0x04
        Opcode.Else, // 0x05
        Opcode.Invalid, // 0x06
        Opcode.Invalid, // 0x07
        Opcode.Invalid, // 0x08
        Opcode.Invalid, // 0x09
        Opcode.Invalid, // 0x0A
        Opcode.End, // 0x0B,
        Opcode.Branch, // 0x0C
        Opcode.Branch_If, // 0x0D
        Opcode.Branch_Table, // 0x0E
        Opcode.Return, // 0x0F
        Opcode.Call, // 0x10
        Opcode.Call_Indirect, // 0x11
        Opcode.Invalid, // 0x12
        Opcode.Invalid, // 0x13
        Opcode.Invalid, // 0x14
        Opcode.Invalid, // 0x15
        Opcode.Invalid, // 0x16
        Opcode.Invalid, // 0x17
        Opcode.Invalid, // 0x18
        Opcode.Invalid, // 0x19
        Opcode.Drop, // 0x1A
        Opcode.Select, // 0x1B
        Opcode.Select_T, // 0x1C
        Opcode.Invalid, // 0x1D
        Opcode.Invalid, // 0x1E
        Opcode.Invalid, // 0x1F
        Opcode.Local_Get, // 0x20
        Opcode.Local_Set, // 0x21
        Opcode.Local_Tee, // 0x22
        Opcode.Global_Get, // 0x23
        Opcode.Global_Set, // 0x24
        Opcode.Table_Get, // 0x25
        Opcode.Table_Set, // 0x26
        Opcode.Invalid, // 0x27
        Opcode.I32_Load, // 0x28
        Opcode.I64_Load, // 0x29
        Opcode.F32_Load, // 0x2A
        Opcode.F64_Load, // 0x2B
        Opcode.I32_Load8_S, // 0x2C
        Opcode.I32_Load8_U, // 0x2D
        Opcode.I32_Load16_S, // 0x2E
        Opcode.I32_Load16_U, // 0x2F
        Opcode.I64_Load8_S, // 0x30
        Opcode.I64_Load8_U, // 0x31
        Opcode.I64_Load16_S, // 0x32
        Opcode.I64_Load16_U, // 0x33
        Opcode.I64_Load32_S, // 0x34
        Opcode.I64_Load32_U, // 0x35
        Opcode.I32_Store, // 0x36
        Opcode.I64_Store, // 0x37
        Opcode.F32_Store, // 0x38
        Opcode.F64_Store, // 0x39
        Opcode.I32_Store8, // 0x3A
        Opcode.I32_Store16, // 0x3B
        Opcode.I64_Store8, // 0x3C
        Opcode.I64_Store16, // 0x3D
        Opcode.I64_Store32, // 0x3E
        Opcode.Memory_Size, // 0x3F
        Opcode.Memory_Grow, // 0x40
        Opcode.I32_Const, // 0x41
        Opcode.I64_Const, // 0x42
        Opcode.F32_Const, // 0x43
        Opcode.F64_Const, // 0x44
        Opcode.I32_Eqz, // 0x45
        Opcode.I32_Eq, // 0x46
        Opcode.I32_NE, // 0x47
        Opcode.I32_LT_S, // 0x48
        Opcode.I32_LT_U, // 0x49
        Opcode.I32_GT_S, // 0x4A
        Opcode.I32_GT_U, // 0x4B
        Opcode.I32_LE_S, // 0x4C
        Opcode.I32_LE_U, // 0x4D
        Opcode.I32_GE_S, // 0x4E
        Opcode.I32_GE_U, // 0x4F
        Opcode.I64_Eqz, // 0x50
        Opcode.I64_Eq, // 0x51
        Opcode.I64_NE, // 0x52
        Opcode.I64_LT_S, // 0x53
        Opcode.I64_LT_U, // 0x54
        Opcode.I64_GT_S, // 0x55
        Opcode.I64_GT_U, // 0x56
        Opcode.I64_LE_S, // 0x57
        Opcode.I64_LE_U, // 0x58
        Opcode.I64_GE_S, // 0x59
        Opcode.I64_GE_U, // 0x5A
        Opcode.F32_EQ, // 0x5B
        Opcode.F32_NE, // 0x5C
        Opcode.F32_LT, // 0x5D
        Opcode.F32_GT, // 0x5E
        Opcode.F32_LE, // 0x5F
        Opcode.F32_GE, // 0x60
        Opcode.F64_EQ, // 0x61
        Opcode.F64_NE, // 0x62
        Opcode.F64_LT, // 0x63
        Opcode.F64_GT, // 0x64
        Opcode.F64_LE, // 0x65
        Opcode.F64_GE, // 0x66
        Opcode.I32_Clz, // 0x67
        Opcode.I32_Ctz, // 0x68
        Opcode.I32_Popcnt, // 0x69
        Opcode.I32_Add, // 0x6A
        Opcode.I32_Sub, // 0x6B
        Opcode.I32_Mul, // 0x6C
        Opcode.I32_Div_S, // 0x6D
        Opcode.I32_Div_U, // 0x6E
        Opcode.I32_Rem_S, // 0x6F
        Opcode.I32_Rem_U, // 0x70
        Opcode.I32_And, // 0x71
        Opcode.I32_Or, // 0x72
        Opcode.I32_Xor, // 0x73
        Opcode.I32_Shl, // 0x74
        Opcode.I32_Shr_S, // 0x75
        Opcode.I32_Shr_U, // 0x76
        Opcode.I32_Rotl, // 0x77
        Opcode.I32_Rotr, // 0x78
        Opcode.I64_Clz, // 0x79
        Opcode.I64_Ctz, // 0x7A
        Opcode.I64_Popcnt, // 0x7B
        Opcode.I64_Add, // 0x7C
        Opcode.I64_Sub, // 0x7D
        Opcode.I64_Mul, // 0x7E
        Opcode.I64_Div_S, // 0x7F
        Opcode.I64_Div_U, // 0x80
        Opcode.I64_Rem_S, // 0x81
        Opcode.I64_Rem_U, // 0x82
        Opcode.I64_And, // 0x83
        Opcode.I64_Or, // 0x84
        Opcode.I64_Xor, // 0x85
        Opcode.I64_Shl, // 0x86
        Opcode.I64_Shr_S, // 0x87
        Opcode.I64_Shr_U, // 0x88
        Opcode.I64_Rotl, // 0x89
        Opcode.I64_Rotr, // 0x8A
        Opcode.F32_Abs, // 0x8B
        Opcode.F32_Neg, // 0x8C
        Opcode.F32_Ceil, // 0x8D
        Opcode.F32_Floor, // 0x8E
        Opcode.F32_Trunc, // 0x8F
        Opcode.F32_Nearest, // 0x90
        Opcode.F32_Sqrt, // 0x91
        Opcode.F32_Add, // 0x92
        Opcode.F32_Sub, // 0x93
        Opcode.F32_Mul, // 0x94
        Opcode.F32_Div, // 0x95
        Opcode.F32_Min, // 0x96
        Opcode.F32_Max, // 0x97
        Opcode.F32_Copysign, // 0x98
        Opcode.F64_Abs, // 0x99
        Opcode.F64_Neg, // 0x9A
        Opcode.F64_Ceil, // 0x9B
        Opcode.F64_Floor, // 0x9C
        Opcode.F64_Trunc, // 0x9D
        Opcode.F64_Nearest, // 0x9E
        Opcode.F64_Sqrt, // 0x9F
        Opcode.F64_Add, // 0xA0
        Opcode.F64_Sub, // 0xA1
        Opcode.F64_Mul, // 0xA2
        Opcode.F64_Div, // 0xA3
        Opcode.F64_Min, // 0xA4
        Opcode.F64_Max, // 0xA5
        Opcode.F64_Copysign, // 0xA6
        Opcode.I32_Wrap_I64, // 0xA7
        Opcode.I32_Trunc_F32_S, // 0xA8
        Opcode.I32_Trunc_F32_U, // 0xA9
        Opcode.I32_Trunc_F64_S, // 0xAA
        Opcode.I32_Trunc_F64_U, // 0xAB
        Opcode.I64_Extend_I32_S, // 0xAC
        Opcode.I64_Extend_I32_U, // 0xAD
        Opcode.I64_Trunc_F32_S, // 0xAE
        Opcode.I64_Trunc_F32_U, // 0xAF
        Opcode.I64_Trunc_F64_S, // 0xB0
        Opcode.I64_Trunc_F64_U, // 0xB1
        Opcode.F32_Convert_I32_S, // 0xB2
        Opcode.F32_Convert_I32_U, // 0xB3
        Opcode.F32_Convert_I64_S, // 0xB4
        Opcode.F32_Convert_I64_U, // 0xB5
        Opcode.F32_Demote_F64, // 0xB6
        Opcode.F64_Convert_I32_S, // 0xB7
        Opcode.F64_Convert_I32_U, // 0xB8
        Opcode.F64_Convert_I64_S, // 0xB9
        Opcode.F64_Convert_I64_U, // 0xBA
        Opcode.F64_Promote_F32, // 0xBB
        Opcode.I32_Reinterpret_F32, // 0xBC
        Opcode.I64_Reinterpret_F64, // 0xBD
        Opcode.F32_Reinterpret_I32, // 0xBE
        Opcode.F64_Reinterpret_I64, // 0xBF
        Opcode.I32_Extend8_S, // 0xC0
        Opcode.I32_Extend16_S, // 0xC1
        Opcode.I64_Extend8_S, // 0xC2
        Opcode.I64_Extend16_S, // 0xC3
        Opcode.I64_Extend32_S, // 0xC4
        Opcode.Invalid, // 0xC5
        Opcode.Invalid, // 0xC6
        Opcode.Invalid, // 0xC7
        Opcode.Invalid, // 0xC8
        Opcode.Invalid, // 0xC9
        Opcode.Invalid, // 0xCA
        Opcode.Invalid, // 0xCB
        Opcode.Invalid, // 0xCC
        Opcode.Invalid, // 0xCD
        Opcode.Invalid, // 0xCE
        Opcode.Invalid, // 0xCF
        Opcode.Ref_Null, // 0xD0
        Opcode.Ref_Is_Null, // 0xD1
        Opcode.Ref_Func, // 0xD2
    };

    /// 0xFC-prefixed opcodes, indexed by (opcode - 0xFC00).
    const wasmFCOpcodeToOpcodeTable = [_]Opcode{
        Opcode.I32_Trunc_Sat_F32_S, // 0xFC00
        Opcode.I32_Trunc_Sat_F32_U, // 0xFC01
        Opcode.I32_Trunc_Sat_F64_S, // 0xFC02
        Opcode.I32_Trunc_Sat_F64_U, // 0xFC03
        Opcode.I64_Trunc_Sat_F32_S, // 0xFC04
        Opcode.I64_Trunc_Sat_F32_U, // 0xFC05
        Opcode.I64_Trunc_Sat_F64_S, // 0xFC06
        Opcode.I64_Trunc_Sat_F64_U, // 0xFC07
        Opcode.Memory_Init, // 0xFC08
        Opcode.Data_Drop, // 0xFC09
        Opcode.Memory_Copy, // 0xFC0A
        Opcode.Memory_Fill, // 0xFC0B
        Opcode.Table_Init, // 0xFC0C
        Opcode.Elem_Drop, // 0xFC0D
        Opcode.Table_Copy, // 0xFC0E
        Opcode.Table_Grow, // 0xFC0F
        Opcode.Table_Size, // 0xFC10
        Opcode.Table_Fill, // 0xFC11
    };

    /// 0xFD-prefixed SIMD opcodes, indexed by (opcode - 0xFD00).
    /// Covers the full 0x00-0xFF sub-opcode range with Invalid placeholders
    /// for encodings the wasm spec does not assign.
    const wasmFDOpcodeToOpcodeTable = [_]Opcode{
        Opcode.V128_Load, // 0xFD00
        Opcode.V128_Load8x8_S, // 0xFD01
        Opcode.V128_Load8x8_U, // 0xFD02
        Opcode.V128_Load16x4_S, // 0xFD03
        Opcode.V128_Load16x4_U, // 0xFD04
        Opcode.V128_Load32x2_S, // 0xFD05
        Opcode.V128_Load32x2_U, // 0xFD06
        Opcode.V128_Load8_Splat, // 0xFD07
        Opcode.V128_Load16_Splat, // 0xFD08
        Opcode.V128_Load32_Splat, // 0xFD09
        Opcode.V128_Load64_Splat, // 0xFD0A
        Opcode.V128_Store, // 0xFD0B
        Opcode.V128_Const, // 0xFD0C
        Opcode.I8x16_Shuffle, // 0xFD0D
        Opcode.I8x16_Swizzle, // 0xFD0E
        Opcode.I8x16_Splat, // 0xFD0F
        Opcode.I16x8_Splat, // 0xFD10
        Opcode.I32x4_Splat, // 0xFD11
        Opcode.I64x2_Splat, // 0xFD12
        Opcode.F32x4_Splat, // 0xFD13
        Opcode.F64x2_Splat, // 0xFD14
        Opcode.I8x16_Extract_Lane_S, // 0xFD15
        Opcode.I8x16_Extract_Lane_U, // 0xFD16
        Opcode.I8x16_Replace_Lane, // 0xFD17
        Opcode.I16x8_Extract_Lane_S, // 0xFD18
        Opcode.I16x8_Extract_Lane_U, // 0xFD19
        Opcode.I16x8_Replace_Lane, // 0xFD1A
        Opcode.I32x4_Extract_Lane, // 0xFD1B
        Opcode.I32x4_Replace_Lane, // 0xFD1C
        Opcode.I64x2_Extract_Lane, // 0xFD1D
        Opcode.I64x2_Replace_Lane, // 0xFD1E
        Opcode.F32x4_Extract_Lane, // 0xFD1F
        Opcode.F32x4_Replace_Lane, // 0xFD20
        Opcode.F64x2_Extract_Lane, // 0xFD21
        Opcode.F64x2_Replace_Lane, // 0xFD22
        Opcode.I8x16_EQ, // 0xFD23
        Opcode.I8x16_NE, // 0xFD24
        Opcode.I8x16_LT_S, // 0xFD25
        Opcode.I8x16_LT_U, // 0xFD26
        Opcode.I8x16_GT_S, // 0xFD27
        Opcode.I8x16_GT_U, // 0xFD28
        Opcode.I8x16_LE_S, // 0xFD29
        Opcode.I8x16_LE_U, // 0xFD2A
        Opcode.I8x16_GE_S, // 0xFD2B
        Opcode.I8x16_GE_U, // 0xFD2C
        Opcode.I16x8_EQ, // 0xFD2D
        Opcode.I16x8_NE, // 0xFD2E
        Opcode.I16x8_LT_S, // 0xFD2F
        Opcode.I16x8_LT_U, // 0xFD30
        Opcode.I16x8_GT_S, // 0xFD31
        Opcode.I16x8_GT_U, // 0xFD32
        Opcode.I16x8_LE_S, // 0xFD33
        Opcode.I16x8_LE_U, // 0xFD34
        Opcode.I16x8_GE_S, // 0xFD35
        Opcode.I16x8_GE_U, // 0xFD36
        Opcode.I32x4_EQ, // 0xFD37
        Opcode.I32x4_NE, // 0xFD38
        Opcode.I32x4_LT_S, // 0xFD39
        Opcode.I32x4_LT_U, // 0xFD3A
        Opcode.I32x4_GT_S, // 0xFD3B
        Opcode.I32x4_GT_U, // 0xFD3C
        Opcode.I32x4_LE_S, // 0xFD3D
        Opcode.I32x4_LE_U, // 0xFD3E
        Opcode.I32x4_GE_S, // 0xFD3F
        Opcode.I32x4_GE_U, // 0xFD40
        Opcode.F32x4_EQ, // 0xFD41
        Opcode.F32x4_NE, // 0xFD42
        Opcode.F32x4_LT, // 0xFD43
        Opcode.F32x4_GT, // 0xFD44
        Opcode.F32x4_LE, // 0xFD45
        Opcode.F32x4_GE, // 0xFD46
        Opcode.F64x2_EQ, // 0xFD47
        Opcode.F64x2_NE, // 0xFD48
        Opcode.F64x2_LT, // 0xFD49
        Opcode.F64x2_GT, // 0xFD4A
        Opcode.F64x2_LE, // 0xFD4B
        Opcode.F64x2_GE, // 0xFD4C
        Opcode.V128_Not, // 0xFD4D
        Opcode.V128_And, // 0xFD4E
        Opcode.V128_AndNot, // 0xFD4F
        Opcode.V128_Or, // 0xFD50
        Opcode.V128_Xor, // 0xFD51
        Opcode.V128_Bitselect, // 0xFD52
        Opcode.V128_AnyTrue, // 0xFD53
        Opcode.V128_Load8_Lane, // 0xFD54
        Opcode.V128_Load16_Lane, // 0xFD55
        Opcode.V128_Load32_Lane, // 0xFD56
        Opcode.V128_Load64_Lane, // 0xFD57
        Opcode.V128_Store8_Lane, // 0xFD58
        Opcode.V128_Store16_Lane, // 0xFD59
        Opcode.V128_Store32_Lane, // 0xFD5A
        Opcode.V128_Store64_Lane, // 0xFD5B
        Opcode.V128_Load32_Zero, // 0xFD5C
        Opcode.V128_Load64_Zero, // 0xFD5D
        Opcode.F32x4_Demote_F64x2_Zero, // 0xFD5E
        Opcode.F64x2_Promote_Low_F32x4, // 0xFD5F
        Opcode.I8x16_Abs, // 0xFD60
        Opcode.I8x16_Neg, // 0xFD61
        Opcode.I8x16_Popcnt, // 0xFD62
        Opcode.I8x16_AllTrue, // 0xFD63
        Opcode.I8x16_Bitmask, // 0xFD64
        Opcode.I8x16_Narrow_I16x8_S, // 0xFD65
        Opcode.I8x16_Narrow_I16x8_U, // 0xFD66
        Opcode.F32x4_Ceil, // 0xFD67
        Opcode.F32x4_Floor, // 0xFD68
        Opcode.F32x4_Trunc, // 0xFD69
        Opcode.F32x4_Nearest, // 0xFD6A
        Opcode.I8x16_Shl, // 0xFD6B
        Opcode.I8x16_Shr_S, // 0xFD6C
        Opcode.I8x16_Shr_U, // 0xFD6D
        Opcode.I8x16_Add, // 0xFD6E
        Opcode.I8x16_Add_Sat_S, // 0xFD6F
        Opcode.I8x16_Add_Sat_U, // 0xFD70
        Opcode.I8x16_Sub, // 0xFD71
        Opcode.I8x16_Sub_Sat_S, // 0xFD72
        Opcode.I8x16_Sub_Sat_U, // 0xFD73
        Opcode.F64x2_Ceil, // 0xFD74
        Opcode.F64x2_Floor, // 0xFD75
        Opcode.I8x16_Min_S, // 0xFD76
        Opcode.I8x16_Min_U, // 0xFD77
        Opcode.I8x16_Max_S, // 0xFD78
        Opcode.I8x16_Max_U, // 0xFD79
        Opcode.F64x2_Trunc, // 0xFD7A
        Opcode.I8x16_Avgr_U, // 0xFD7B
        Opcode.I16x8_Extadd_Pairwise_I8x16_S, // 0xFD7C
        Opcode.I16x8_Extadd_Pairwise_I8x16_U, // 0xFD7D
        Opcode.I32x4_Extadd_Pairwise_I16x8_S, // 0xFD7E
        Opcode.I32x4_Extadd_Pairwise_I16x8_U, // 0xFD7F
        Opcode.I16x8_Abs, // 0xFD80
        Opcode.I16x8_Neg, // 0xFD81
        Opcode.I16x8_Q15mulr_Sat_S, // 0xFD82
        Opcode.I16x8_AllTrue, // 0xFD83
        Opcode.I16x8_Bitmask, // 0xFD84
        Opcode.I16x8_Narrow_I32x4_S, // 0xFD85
        Opcode.I16x8_Narrow_I32x4_U, // 0xFD86
        Opcode.I16x8_Extend_Low_I8x16_S, // 0xFD87
        Opcode.I16x8_Extend_High_I8x16_S, // 0xFD88
        Opcode.I16x8_Extend_Low_I8x16_U, // 0xFD89
        Opcode.I16x8_Extend_High_I8x16_U, // 0xFD8A
        Opcode.I16x8_Shl, // 0xFD8B
        Opcode.I16x8_Shr_S, // 0xFD8C
        Opcode.I16x8_Shr_U, // 0xFD8D
        Opcode.I16x8_Add, // 0xFD8E
        Opcode.I16x8_Add_Sat_S, // 0xFD8F
        Opcode.I16x8_Add_Sat_U, // 0xFD90
        Opcode.I16x8_Sub, // 0xFD91
        Opcode.I16x8_Sub_Sat_S, // 0xFD92
        Opcode.I16x8_Sub_Sat_U, // 0xFD93
        Opcode.F64x2_Nearest, // 0xFD94
        Opcode.I16x8_Mul, // 0xFD95
        Opcode.I16x8_Min_S, // 0xFD96
        Opcode.I16x8_Min_U, // 0xFD97
        Opcode.I16x8_Max_S, // 0xFD98
        Opcode.I16x8_Max_U, // 0xFD99
        Opcode.Invalid, // 0xFD9A
        Opcode.I16x8_Avgr_U, // 0xFD9B
        Opcode.I16x8_Extmul_Low_I8x16_S, // 0xFD9C
        Opcode.I16x8_Extmul_High_I8x16_S, // 0xFD9D
        Opcode.I16x8_Extmul_Low_I8x16_U, // 0xFD9E
        Opcode.I16x8_Extmul_High_I8x16_U, // 0xFD9F
        Opcode.I32x4_Abs, // 0xFDA0
        Opcode.I32x4_Neg, // 0xFDA1
        Opcode.Invalid, // 0xFDA2
        Opcode.I32x4_AllTrue, // 0xFDA3
        Opcode.I32x4_Bitmask, // 0xFDA4
        Opcode.Invalid, // 0xFDA5
        Opcode.Invalid, // 0xFDA6
        Opcode.I32x4_Extend_Low_I16x8_S, // 0xFDA7
        Opcode.I32x4_Extend_High_I16x8_S, // 0xFDA8
        Opcode.I32x4_Extend_Low_I16x8_U, // 0xFDA9
        Opcode.I32x4_Extend_High_I16x8_U, // 0xFDAA
        Opcode.I32x4_Shl, // 0xFDAB
        Opcode.I32x4_Shr_S, // 0xFDAC
        Opcode.I32x4_Shr_U, // 0xFDAD
        Opcode.I32x4_Add, // 0xFDAE
        Opcode.Invalid, // 0xFDAF
        Opcode.Invalid, // 0xFDB0
        Opcode.I32x4_Sub, // 0xFDB1
        Opcode.Invalid, // 0xFDB2
        Opcode.Invalid, // 0xFDB3
        Opcode.Invalid, // 0xFDB4
        Opcode.I32x4_Mul, // 0xFDB5
        Opcode.I32x4_Min_S, // 0xFDB6
        Opcode.I32x4_Min_U, // 0xFDB7
        Opcode.I32x4_Max_S, // 0xFDB8
        Opcode.I32x4_Max_U, // 0xFDB9
        Opcode.I32x4_Dot_I16x8_S, // 0xFDBA
        Opcode.Invalid, // 0xFDBB
        Opcode.I32x4_Extmul_Low_I16x8_S, // 0xFDBC
        Opcode.I32x4_Extmul_High_I16x8_S, // 0xFDBD
        Opcode.I32x4_Extmul_Low_I16x8_U, // 0xFDBE
        Opcode.I32x4_Extmul_High_I16x8_U, // 0xFDBF
        Opcode.I64x2_Abs, // 0xFDC0
        Opcode.I64x2_Neg, // 0xFDC1
        Opcode.Invalid, // 0xFDC2
        Opcode.I64x2_AllTrue, // 0xFDC3
        Opcode.I64x2_Bitmask, // 0xFDC4
        Opcode.Invalid, // 0xFDC5
        Opcode.Invalid, // 0xFDC6
        Opcode.I64x2_Extend_Low_I32x4_S, // 0xFDC7
        Opcode.I64x2_Extend_High_I32x4_S, // 0xFDC8
        Opcode.I64x2_Extend_Low_I32x4_U, // 0xFDC9
        Opcode.I64x2_Extend_High_I32x4_U, // 0xFDCA
        Opcode.I64x2_Shl, // 0xFDCB
        Opcode.I64x2_Shr_S, // 0xFDCC
        Opcode.I64x2_Shr_U, // 0xFDCD
        Opcode.I64x2_Add, // 0xFDCE
        Opcode.Invalid, // 0xFDCF
        Opcode.Invalid, // 0xFDD0
        Opcode.I64x2_Sub, // 0xFDD1
        Opcode.Invalid, // 0xFDD2
        Opcode.Invalid, // 0xFDD3
        Opcode.Invalid, // 0xFDD4
        Opcode.I64x2_Mul, // 0xFDD5
        Opcode.I64x2_EQ, // 0xFDD6
        Opcode.I64x2_NE, // 0xFDD7
        Opcode.I64x2_LT_S, // 0xFDD8
        Opcode.I64x2_GT_S, // 0xFDD9
        Opcode.I64x2_LE_S, // 0xFDDA
        Opcode.I64x2_GE_S, // 0xFDDB
        Opcode.I64x2_Extmul_Low_I32x4_S, // 0xFDDC
        Opcode.I64x2_Extmul_High_I32x4_S, // 0xFDDD
        Opcode.I64x2_Extmul_Low_I32x4_U, // 0xFDDE
        Opcode.I64x2_Extmul_High_I32x4_U, // 0xFDDF
        Opcode.F32x4_Abs, // 0xFDE0
        Opcode.F32x4_Neg, // 0xFDE1
        Opcode.Invalid, // 0xFDE2
        Opcode.F32x4_Sqrt, // 0xFDE3
        Opcode.F32x4_Add, // 0xFDE4
        Opcode.F32x4_Sub, // 0xFDE5
        Opcode.F32x4_Mul, // 0xFDE6
        Opcode.F32x4_Div, // 0xFDE7
        Opcode.F32x4_Min, // 0xFDE8
        Opcode.F32x4_Max, // 0xFDE9
        Opcode.F32x4_PMin, // 0xFDEA
        Opcode.F32x4_PMax, // 0xFDEB
        Opcode.F64x2_Abs, // 0xFDEC
        Opcode.F64x2_Neg, // 0xFDED
        Opcode.Invalid, // 0xFDEE
        Opcode.F64x2_Sqrt, // 0xFDEF
        Opcode.F64x2_Add, // 0xFDF0
        Opcode.F64x2_Sub, // 0xFDF1
        Opcode.F64x2_Mul, // 0xFDF2
        Opcode.F64x2_Div, // 0xFDF3
        Opcode.F64x2_Min, // 0xFDF4
        Opcode.F64x2_Max, // 0xFDF5
        Opcode.F64x2_PMin, // 0xFDF6
        Opcode.F64x2_PMax, // 0xFDF7
        Opcode.F32x4_Trunc_Sat_F32x4_S, // 0xFDF8
        Opcode.F32x4_Trunc_Sat_F32x4_U, // 0xFDF9
        Opcode.F32x4_Convert_I32x4_S, // 0xFDFA
        Opcode.F32x4_Convert_I32x4_U, // 0xFDFB
        Opcode.I32x4_Trunc_Sat_F64x2_S_Zero, // 0xFDFC
        Opcode.I32x4_Trunc_Sat_F64x2_U_Zero, // 0xFDFD
        Opcode.F64x2_Convert_Low_I32x4_S, // 0xFDFE
        Opcode.F64x2_Convert_Low_I32x4_U, // 0xFDFF
    };
};
1437 |
--------------------------------------------------------------------------------
/src/stringpool.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const StableArray = @import("stable-array").StableArray;
3 |
4 | const hashString = std.hash_map.hashString;
5 | const StringHashLookupTable = std.hash_map.AutoHashMap(u64, usize);
6 |
// This file is itself the StringPool struct (file-as-struct pattern).
const StringPool = @This();

// Backing storage for pooled strings; each entry is a u16 length prefix
// followed by the string bytes, padded to StringLenType alignment.
buffer: StableArray(u8),
// Maps hashString(str) -> byte offset of the entry's length prefix in `buffer`.
lookup: StringHashLookupTable,

// Length-prefix type; caps a pooled string at 65535 bytes.
const StringLenType = u16;

pub const PutError = error{StringLengthTooLong};
15 |
/// Creates an empty pool. `max_stringpool_bytes` caps the virtual address
/// reservation of the backing StableArray; `allocator` backs the hash lookup.
pub fn init(max_stringpool_bytes: usize, allocator: std.mem.Allocator) StringPool {
    const backing = StableArray(u8).init(max_stringpool_bytes);
    const table = StringHashLookupTable.init(allocator);
    return .{
        .buffer = backing,
        .lookup = table,
    };
}
22 |
/// Releases the lookup table and the backing string buffer. The two members
/// are independent, so teardown order does not matter.
pub fn deinit(self: *StringPool) void {
    self.lookup.deinit();
    self.buffer.deinit();
}
27 |
/// Appends `str` to the pool and registers it in the hash lookup, returning
/// a slice of the pooled copy (stable for the pool's lifetime). Does NOT
/// check for an existing copy — callers wanting dedup should use findOrPut.
/// On hash reuse the lookup entry is overwritten to point at the new copy.
/// Returns error.StringLengthTooLong if `str` exceeds maxInt(u16) bytes,
/// or an allocation error from the buffer/lookup.
pub fn put(self: *StringPool, str: []const u8) ![]const u8 {
    if (str.len > std.math.maxInt(StringLenType)) {
        return error.StringLengthTooLong;
    }

    const hash: u64 = hashString(str);

    // alignment requirements for StringLenType may require the buffer to be 1 byte larger than string size + sizeOf(StringLenType)
    // so take care not to include the final byte in the string + size byte buffer
    const string_and_size_num_bytes: usize = str.len + @sizeOf(StringLenType);
    const alloc_size = std.mem.alignForward(usize, string_and_size_num_bytes, @alignOf(StringLenType));
    const str_offset_begin: usize = self.buffer.items.len;
    const str_offset_end: usize = str_offset_begin + string_and_size_num_bytes;
    const aligned_buffer_end: usize = str_offset_begin + alloc_size;

    try self.buffer.resize(aligned_buffer_end);
    try self.lookup.put(hash, str_offset_begin);

    // Write the u16 length prefix in native byte order, then the string bytes.
    var bytes: []u8 = self.buffer.items[str_offset_begin..str_offset_end];
    const str_len: *StringLenType = @alignCast(@ptrCast(bytes.ptr));
    str_len.* = @as(StringLenType, @intCast(str.len));
    const str_bytes: []u8 = bytes[@sizeOf(StringLenType)..];
    @memcpy(str_bytes, str);

    return str_bytes;
}
54 |
/// Looks up a previously pooled copy of `str`, returning a slice into the
/// pool's buffer, or null if the string is not present.
///
/// The lookup table is keyed only on the 64-bit hash, so two distinct strings
/// can collide. Previously a collision returned the OTHER string's bytes;
/// now the pooled contents are verified with std.mem.eql and a mismatch is
/// reported as "not found" (findOrPut will then append the queried string).
pub fn find(self: *StringPool, str: []const u8) ?[]const u8 {
    const hash: u64 = hashString(str);

    if (self.lookup.get(hash)) |string_bytes_begin| {
        const str_bytes: [*]u8 = self.buffer.items[string_bytes_begin..].ptr;
        // Entry layout: native-endian u16 length prefix, then the string bytes.
        const str_len: *StringLenType = @alignCast(@ptrCast(str_bytes));
        const pooled_str: []u8 = str_bytes[@sizeOf(StringLenType) .. @sizeOf(StringLenType) + str_len.*];
        // Guard against hash collisions: only return the pooled bytes if they
        // actually match the query.
        if (std.mem.eql(u8, pooled_str, str)) {
            return pooled_str;
        }
    }

    return null;
}
67 |
/// Returns the pooled copy of `str`, adding it first if it isn't pooled yet.
pub fn findOrPut(self: *StringPool, str: []const u8) ![]const u8 {
    if (self.find(str)) |existing| return existing;
    return self.put(str);
}
75 |
76 | test "basic" {
77 | const test_str: []const u8 = "test";
78 | const test1_str: []const u8 = "test";
79 | const test2_str: []const u8 = "test2";
80 | const long_str: []const u8 = "a very long string that has no end repeated many times! a very long string that has no end repeated many times! a very long string that has no end repeated many times!";
81 |
82 | var pool = StringPool.init(4096, std.testing.allocator);
83 | defer pool.deinit();
84 |
85 | const test_str_added = try pool.put(test_str);
86 | const test1_str_added = try pool.put(test1_str);
87 | const test2_str_added = try pool.put(test2_str);
88 | const long_str_added = try pool.put(long_str);
89 |
90 | try std.testing.expect(test_str_added.ptr != test_str.ptr);
91 | try std.testing.expect(test1_str_added.ptr != test1_str.ptr);
92 | try std.testing.expect(test2_str_added.ptr != test2_str.ptr);
93 | try std.testing.expect(long_str_added.ptr != long_str.ptr);
94 |
95 | const test_str_found = pool.find(test_str);
96 | const test1_str_found = pool.find(test1_str);
97 | const test2_str_found = pool.find(test2_str);
98 | const long_str_found = pool.find(long_str);
99 |
100 | try std.testing.expect(test_str_found != null);
101 | try std.testing.expect(test1_str_found != null);
102 | try std.testing.expect(test2_str_found != null);
103 | try std.testing.expect(long_str_found != null);
104 |
105 | try std.testing.expect(test_str_found.?.ptr != test_str.ptr);
106 | try std.testing.expect(test1_str_found.?.ptr != test1_str.ptr);
107 | try std.testing.expect(test2_str_found.?.ptr != test2_str.ptr);
108 | try std.testing.expect(long_str_found.?.ptr != long_str.ptr);
109 |
110 | std.debug.print("found: {s}, existing: {s}\n", .{ test_str_found.?, test_str });
111 |
112 | try std.testing.expect(std.mem.eql(u8, test_str_found.?, test_str));
113 | try std.testing.expect(std.mem.eql(u8, test1_str_found.?, test1_str));
114 | try std.testing.expect(std.mem.eql(u8, test2_str_found.?, test2_str));
115 | try std.testing.expect(std.mem.eql(u8, long_str_found.?, long_str));
116 |
117 | const lazyadd_str1 = try pool.findOrPut("lazy put");
118 | const lazyadd_str2 = try pool.findOrPut("lazy put");
119 | try std.testing.expect(lazyadd_str1.ptr == lazyadd_str2.ptr);
120 | }
121 |
--------------------------------------------------------------------------------
/src/tests.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const testing = std.testing;
3 | const expectEqual = testing.expectEqual;
4 |
5 | const core = @import("core.zig");
6 | const Limits = core.Limits;
7 | const MemoryInstance = core.MemoryInstance;
8 |
9 | const metering = @import("metering.zig");
10 |
11 | test "StackVM.Integration" {
12 | const wasm_filepath = "zig-out/bin/mandelbrot.wasm";
13 |
14 | var allocator = std.testing.allocator;
15 |
16 | var cwd = std.fs.cwd();
17 | const wasm_data: []u8 = try cwd.readFileAlloc(allocator, wasm_filepath, 1024 * 1024 * 128);
18 | defer allocator.free(wasm_data);
19 |
20 | const module_def_opts = core.ModuleDefinitionOpts{
21 | .debug_name = std.fs.path.basename(wasm_filepath),
22 | };
23 | var module_def = try core.createModuleDefinition(allocator, module_def_opts);
24 | defer module_def.destroy();
25 |
26 | try module_def.decode(wasm_data);
27 |
28 | var module_inst = try core.createModuleInstance(.Stack, module_def, allocator);
29 | defer module_inst.destroy();
30 | }
31 |
32 | test "StackVM.Metering" {
33 | if (!metering.enabled) {
34 | return;
35 | }
36 | const wasm_filepath = "zig-out/bin/fibonacci.wasm";
37 |
38 | var allocator = std.testing.allocator;
39 |
40 | var cwd = std.fs.cwd();
41 | const wasm_data: []u8 = try cwd.readFileAlloc(allocator, wasm_filepath, 1024 * 1024 * 128);
42 | defer allocator.free(wasm_data);
43 |
44 | const module_def_opts = core.ModuleDefinitionOpts{
45 | .debug_name = std.fs.path.basename(wasm_filepath),
46 | };
47 | var module_def = try core.createModuleDefinition(allocator, module_def_opts);
48 | defer module_def.destroy();
49 |
50 | try module_def.decode(wasm_data);
51 |
52 | var module_inst = try core.createModuleInstance(.Stack, module_def, allocator);
53 | defer module_inst.destroy();
54 |
55 | try module_inst.instantiate(.{});
56 |
57 | var returns = [1]core.Val{.{ .I64 = 5555 }};
58 | var params = [1]core.Val{.{ .I32 = 10 }};
59 |
60 | const handle = try module_inst.getFunctionHandle("run");
61 | const res = module_inst.invoke(handle, ¶ms, &returns, .{
62 | .meter = 2,
63 | });
64 | try std.testing.expectError(metering.MeteringTrapError.TrapMeterExceeded, res);
65 | try std.testing.expectEqual(5555, returns[0].I32);
66 |
67 | const res2 = module_inst.resumeInvoke(&returns, .{ .meter = 5 });
68 | try std.testing.expectError(metering.MeteringTrapError.TrapMeterExceeded, res2);
69 | try std.testing.expectEqual(5555, returns[0].I32);
70 |
71 | try module_inst.resumeInvoke(&returns, .{ .meter = 10000 });
72 | try std.testing.expectEqual(89, returns[0].I32);
73 | }
74 |
75 | test "MemoryInstance.init" {
76 | {
77 | const limits = Limits{
78 | .min = 0,
79 | .max = null,
80 | .limit_type = 0, // i32 index type
81 | };
82 | var memory = MemoryInstance.init(limits, null);
83 | defer memory.deinit();
84 | try expectEqual(memory.limits.min, 0);
85 | try expectEqual(memory.limits.max, Limits.k_max_pages_i32);
86 | try expectEqual(memory.size(), 0);
87 | try expectEqual(memory.mem.Internal.items.len, 0);
88 | }
89 |
90 | {
91 | const limits = Limits{
92 | .min = 0,
93 | .max = null,
94 | .limit_type = 4, // i64 index type
95 | };
96 | var memory = MemoryInstance.init(limits, null);
97 | defer memory.deinit();
98 | try expectEqual(memory.limits.min, 0);
99 | try expectEqual(memory.limits.max, Limits.k_max_pages_i64);
100 | try expectEqual(memory.size(), 0);
101 | try expectEqual(memory.mem.Internal.items.len, 0);
102 | }
103 |
104 | {
105 | const limits = Limits{
106 | .min = 25,
107 | .max = 25,
108 | .limit_type = 1,
109 | };
110 | var memory = MemoryInstance.init(limits, null);
111 | defer memory.deinit();
112 | try expectEqual(memory.limits.min, 0);
113 | try expectEqual(memory.limits.max, limits.max);
114 | try expectEqual(memory.mem.Internal.items.len, 0);
115 | }
116 | }
117 |
118 | test "MemoryInstance.Internal.grow" {
119 | {
120 | const limits = Limits{
121 | .min = 0,
122 | .max = null,
123 | .limit_type = 0,
124 | };
125 | var memory = MemoryInstance.init(limits, null);
126 | defer memory.deinit();
127 | try expectEqual(memory.grow(0), true);
128 | try expectEqual(memory.grow(1), true);
129 | try expectEqual(memory.size(), 1);
130 | try expectEqual(memory.grow(1), true);
131 | try expectEqual(memory.size(), 2);
132 | try expectEqual(memory.grow(Limits.k_max_pages_i32 - memory.size()), true);
133 | try expectEqual(memory.size(), Limits.k_max_pages_i32);
134 | }
135 |
136 | {
137 | const limits = Limits{
138 | .min = 0,
139 | .max = 25,
140 | .limit_type = 1,
141 | };
142 | var memory = MemoryInstance.init(limits, null);
143 | defer memory.deinit();
144 | try expectEqual(memory.grow(25), true);
145 | try expectEqual(memory.size(), 25);
146 | try expectEqual(memory.grow(1), false);
147 | try expectEqual(memory.size(), 25);
148 | }
149 | }
150 |
151 | test "MemoryInstance.Internal.growAbsolute" {
152 | {
153 | const limits = Limits{
154 | .min = 0,
155 | .max = null,
156 | .limit_type = 0,
157 | };
158 | var memory = MemoryInstance.init(limits, null);
159 | defer memory.deinit();
160 | try expectEqual(memory.growAbsolute(0), true);
161 | try expectEqual(memory.size(), 0);
162 | try expectEqual(memory.growAbsolute(1), true);
163 | try expectEqual(memory.size(), 1);
164 | try expectEqual(memory.growAbsolute(5), true);
165 | try expectEqual(memory.size(), 5);
166 | try expectEqual(memory.growAbsolute(Limits.k_max_pages_i32), true);
167 | try expectEqual(memory.size(), Limits.k_max_pages_i32);
168 | }
169 |
170 | {
171 | const limits = Limits{
172 | .min = 0,
173 | .max = 25,
174 | .limit_type = 1,
175 | };
176 | var memory = MemoryInstance.init(limits, null);
177 | defer memory.deinit();
178 | try expectEqual(memory.growAbsolute(25), true);
179 | try expectEqual(memory.size(), 25);
180 | try expectEqual(memory.growAbsolute(26), false);
181 | try expectEqual(memory.size(), 25);
182 | }
183 | }
184 |
--------------------------------------------------------------------------------
/src/vm_register.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const assert = std.debug.assert;
3 |
4 | const builtin = @import("builtin");
5 |
6 | const AllocError = std.mem.Allocator.Error;
7 |
8 | const common = @import("common.zig");
9 | const StableArray = common.StableArray;
10 |
11 | const opcodes = @import("opcode.zig");
12 | const Opcode = opcodes.Opcode;
13 | const WasmOpcode = opcodes.WasmOpcode;
14 |
15 | const def = @import("definition.zig");
16 | pub const i8x16 = def.i8x16;
17 | pub const u8x16 = def.u8x16;
18 | pub const i16x8 = def.i16x8;
19 | pub const u16x8 = def.u16x8;
20 | pub const i32x4 = def.i32x4;
21 | pub const u32x4 = def.u32x4;
22 | pub const i64x2 = def.i64x2;
23 | pub const u64x2 = def.u64x2;
24 | pub const f32x4 = def.f32x4;
25 | pub const f64x2 = def.f64x2;
26 | pub const v128 = def.v128;
27 | const BlockImmediates = def.BlockImmediates;
28 | const BranchTableImmediates = def.BranchTableImmediates;
29 | const CallIndirectImmediates = def.CallIndirectImmediates;
30 | const ConstantExpression = def.ConstantExpression;
31 | const DataDefinition = def.DataDefinition;
32 | const ElementDefinition = def.ElementDefinition;
33 | const ElementMode = def.ElementMode;
34 | const FunctionDefinition = def.FunctionDefinition;
35 | const FunctionExport = def.FunctionExport;
36 | const FunctionHandle = def.FunctionHandle;
37 | const FunctionHandleType = def.FunctionHandleType;
38 | const FunctionTypeDefinition = def.FunctionTypeDefinition;
39 | const GlobalDefinition = def.GlobalDefinition;
40 | const GlobalMut = def.GlobalMut;
41 | const IfImmediates = def.IfImmediates;
42 | const ImportNames = def.ImportNames;
43 | const Instruction = def.Instruction;
44 | const Limits = def.Limits;
45 | const MemoryDefinition = def.MemoryDefinition;
46 | const MemoryOffsetAndLaneImmediates = def.MemoryOffsetAndLaneImmediates;
47 | const ModuleDefinition = def.ModuleDefinition;
48 | const NameCustomSection = def.NameCustomSection;
49 | const TableDefinition = def.TableDefinition;
50 | const TablePairImmediates = def.TablePairImmediates;
51 | const Val = def.Val;
52 | const ValType = def.ValType;
53 | const TaggedVal = def.TaggedVal;
54 |
55 | const inst = @import("instance.zig");
56 | const VM = inst.VM;
57 | const ModuleInstance = inst.ModuleInstance;
58 | const InvokeOpts = inst.InvokeOpts;
59 | const ResumeInvokeOpts = inst.ResumeInvokeOpts;
60 | const DebugTrapInstructionMode = inst.DebugTrapInstructionMode;
61 | const ModuleInstantiateOpts = inst.ModuleInstantiateOpts;
62 |
63 | const INVALID_INSTRUCTION_INDEX: u32 = std.math.maxInt(u32);
64 |
65 | // High-level strategy:
66 | // 1. Transform the ModuleDefinition's bytecode into a sea-of-nodes type of IR.
67 | // 2. Perform constant folding, and other peephole optimizations.
68 | // 3. Perform register allocation
69 | // 4. Generate new bytecode
70 | // 5. Implement the runtime instructions for the register-based bytecode
71 |
// A node in the sea-of-nodes IR. Edges point from producers (in) to
// consumers (out) of stack values.
const IRNode = struct {
    opcode: Opcode,
    // True for phi nodes, which merge values from divergent control flow and
    // have no backing wasm instruction.
    is_phi: bool,
    // Index into module_def.code.instructions, or INVALID_INSTRUCTION_INDEX
    // for synthesized nodes.
    instruction_index: u32,
    // Raw pointer + count pairs (rather than slices) to keep the node small.
    edges_in: ?[*]*IRNode,
    edges_in_count: u32,
    edges_out: ?[*]*IRNode,
    edges_out_count: u32,

    // Shared factory: allocates a node out of the module IR pool and fills in
    // the common fields. All edge lists start empty.
    fn create(mir: *ModuleIR, opcode: Opcode, is_phi: bool, instruction_index: u32) AllocError!*IRNode {
        const node: *IRNode = mir.ir.addOne() catch return AllocError.OutOfMemory;
        node.* = IRNode{
            .opcode = opcode,
            .is_phi = is_phi,
            .instruction_index = instruction_index,
            .edges_in = null,
            .edges_in_count = 0,
            .edges_out = null,
            .edges_out_count = 0,
        };
        return node;
    }

    fn createWithInstruction(mir: *ModuleIR, instruction_index: u32) AllocError!*IRNode {
        const opcode: Opcode = mir.module_def.code.instructions.items[instruction_index].opcode;
        return create(mir, opcode, false, instruction_index);
    }

    fn createStandalone(mir: *ModuleIR, opcode: Opcode) AllocError!*IRNode {
        return create(mir, opcode, false, INVALID_INSTRUCTION_INDEX);
    }

    fn createPhi(mir: *ModuleIR) AllocError!*IRNode {
        // Fix: phis have no backing instruction, so use the invalid sentinel
        // instead of 0 — otherwise instruction() would happily return
        // instruction #0 instead of null.
        return create(mir, .Invalid, true, INVALID_INSTRUCTION_INDEX);
    }

    fn deinit(node: IRNode, allocator: std.mem.Allocator) void {
        if (node.edges_in) |e| allocator.free(e[0..node.edges_in_count]);
        if (node.edges_out) |e| allocator.free(e[0..node.edges_out_count]);
    }

    // Returns the backing instruction, or null for synthesized nodes
    // (phis and standalone nodes).
    fn instruction(node: IRNode, module_def: ModuleDefinition) ?*Instruction {
        return if (node.instruction_index != INVALID_INSTRUCTION_INDEX)
            &module_def.code.instructions.items[node.instruction_index]
        else
            null;
    }

    fn edgesIn(node: IRNode) []*IRNode {
        return if (node.edges_in) |e| e[0..node.edges_in_count] else &[0]*IRNode{};
    }

    fn edgesOut(node: IRNode) []*IRNode {
        return if (node.edges_out) |e| e[0..node.edges_out_count] else &[0]*IRNode{};
    }

    const EdgeDirection = enum {
        In,
        Out,
    };

    // Appends `edges` to the node's in- or out-edge array. The array is
    // exactly sized, so every push reallocates and copies.
    fn pushEdges(node: *IRNode, comptime direction: EdgeDirection, edges: []*IRNode, allocator: std.mem.Allocator) AllocError!void {
        const existing = if (direction == .In) node.edgesIn() else node.edgesOut();
        const new = try allocator.alloc(*IRNode, existing.len + edges.len);
        @memcpy(new[0..existing.len], existing);
        @memcpy(new[existing.len .. existing.len + edges.len], edges);
        if (existing.len > 0) {
            allocator.free(existing);
        }
        switch (direction) {
            .In => {
                node.edges_in = new.ptr;
                node.edges_in_count = @intCast(new.len);
            },
            .Out => {
                node.edges_out = new.ptr;
                node.edges_out_count = @intCast(new.len);
            },
        }

        if (node.is_phi) {
            std.debug.assert(node.edges_in_count <= 2);
            std.debug.assert(node.edges_out_count <= 1);
        }
    }

    fn hasSideEffects(node: *IRNode) bool {
        // We define a side-effect instruction as any that could affect the Store or control flow
        return switch (node.opcode) {
            .Call => true,
            else => false,
        };
    }

    fn isFlowControl(node: *IRNode) bool {
        return switch (node.opcode) {
            .If,
            .IfNoElse,
            .Else,
            .Return,
            .Branch,
            .Branch_If,
            .Branch_Table,
            => true,
            else => false,
        };
    }

    fn needsRegisterSlot(node: *IRNode) bool {
        // TODO fill this out
        return switch (node.opcode) {
            .If,
            .IfNoElse,
            .Else,
            .Return,
            .Branch,
            .Branch_If,
            .Branch_Table,
            => false,
            else => true,
        };
    }

    fn numRegisterSlots(node: *IRNode) u32 {
        return switch (node.opcode) {
            .If,
            .IfNoElse,
            .Else,
            .Return,
            .Branch,
            .Branch_If,
            .Branch_Table,
            => 0,
            else => 1,
        };
    }

    // a node that has no out edges to instructions with side effects or control flow
    // NOTE(review): there is no visited set here, so a cyclic subgraph would
    // loop forever — confirm the out-edge graph is acyclic at this point.
    fn isIsland(node: *IRNode, unvisited: *std.ArrayList(*IRNode)) AllocError!bool {
        if (node.opcode == .Return) {
            return false;
        }

        unvisited.clearRetainingCapacity();

        for (node.edgesOut()) |edge| {
            try unvisited.append(edge);
        }

        while (unvisited.items.len > 0) {
            const next: *IRNode = unvisited.pop();
            // Fix: the flow-control check must inspect the node being visited
            // (`next`), not the root `node`, which was already checked above.
            if (next.opcode == .Return or next.hasSideEffects() or next.isFlowControl()) {
                return false;
            }
            for (next.edgesOut()) |edge| {
                try unvisited.append(edge);
            }
        }

        unvisited.clearRetainingCapacity();

        return true;
    }
};
251 |
// Register-slot allocator used by regalloc(). The slot array doubles as a
// free list: `last_free` heads an intrusive chain threaded through Slot.prev.
const RegisterSlots = struct {
    const Slot = struct {
        node: ?*IRNode,
        prev: ?u32,
    };

    slots: std.ArrayList(Slot),
    last_free: ?u32,

    fn init(allocator: std.mem.Allocator) RegisterSlots {
        return .{
            .slots = std.ArrayList(Slot).init(allocator),
            .last_free = null,
        };
    }

    fn deinit(self: *RegisterSlots) void {
        self.slots.deinit();
    }

    // Hands out a free slot index, growing the array by one if the free list
    // is empty, and claims it for `node`.
    fn alloc(self: *RegisterSlots, node: *IRNode) AllocError!u32 {
        if (self.last_free == null) {
            self.last_free = @intCast(self.slots.items.len);
            try self.slots.append(Slot{ .node = null, .prev = null });
        }

        const index = self.last_free.?;
        const slot: *Slot = &self.slots.items[index];
        self.last_free = slot.prev;
        slot.node = node;
        slot.prev = null;

        std.debug.print("pushed node {*} with opcode {} to index {}\n", .{ node, node.opcode, index });

        return index;
    }

    // Returns `index` to the free list, but only if it is still owned by `node`.
    fn freeAt(self: *RegisterSlots, node: *IRNode, index: u32) void {
        var freed: bool = false;
        const slot: *Slot = &self.slots.items[index];
        if (slot.node == node) {
            slot.node = null;
            slot.prev = self.last_free;
            self.last_free = index;
            freed = true;
        }

        std.debug.print("attempting to free node {*} with opcode {} at index {}: {}\n", .{ node, node.opcode, index, freed });
    }
};
305 |
// Per-function IR: the dataflow graph root plus register assignments.
const IRFunction = struct {
    definition_index: usize,
    // Root of the dataflow graph; regalloc() asserts this is the final Return.
    ir_root: *IRNode,

    // Maps each IR node to the register slot index assigned by regalloc().
    register_map: std.AutoHashMap(*const IRNode, u32),

    fn init(definition_index: u32, ir_root: *IRNode, allocator: std.mem.Allocator) IRFunction {
        return IRFunction{
            .definition_index = definition_index,
            .ir_root = ir_root,
            .register_map = std.AutoHashMap(*const IRNode, u32).init(allocator),
        };
    }

    fn deinit(self: *IRFunction) void {
        self.register_map.deinit();
    }

    // Returns the FunctionDefinition this IR was generated from.
    fn definition(func: IRFunction, module_def: ModuleDefinition) *FunctionDefinition {
        return &module_def.functions.items[func.definition_index];
    }

    // Assigns a register slot to every node that needs one, walking the graph
    // breadth-first from the root.
    fn regalloc(func: *IRFunction, allocator: std.mem.Allocator) AllocError!void {
        std.debug.assert(func.ir_root.opcode == .Return); // TODO need to update other places in the code to ensure this is a thing

        var slots = RegisterSlots.init(allocator);
        defer slots.deinit();

        var visit_queue = std.ArrayList(*IRNode).init(allocator);
        defer visit_queue.deinit();
        try visit_queue.append(func.ir_root);

        var visited = std.AutoHashMap(*IRNode, void).init(allocator);
        defer visited.deinit();

        while (visit_queue.items.len > 0) {
            // visit the graph in breadth-first order (FIFO queue)
            // NOTE: orderedRemove(0) is O(n); fine for now, consider a ring
            // buffer if function sizes grow.
            const node: *IRNode = visit_queue.orderedRemove(0);
            try visited.put(node, {});

            // mark output node slots as free - this is safe because the dataflow graph flows one way and the
            // output can't be reused higher up in the graph
            for (node.edgesOut()) |output_node| {
                if (func.register_map.get(output_node)) |index| {
                    slots.freeAt(output_node, index);
                }
            }

            // allocate slots for this instruction
            // TODO handle multiple output slots (e.g. results of a function call)
            if (node.needsRegisterSlot()) {
                const index: u32 = try slots.alloc(node);
                try func.register_map.put(node, index);
            }

            // add inputs to the FIFO visit queue
            for (node.edgesIn()) |input_node| {
                if (visited.contains(input_node) == false) {
                    try visit_queue.append(input_node);
                }
            }
        }
    }

    // Emits register-based instructions for the graph. Nodes are emitted
    // root-first (a node only once all its consumers have been emitted), then
    // the emitted range is reversed into execution order.
    fn codegen(func: *IRFunction, instructions: *std.ArrayList(RegInstruction), module_def: ModuleDefinition, allocator: std.mem.Allocator) AllocError!void {
        const start_instruction_offset = instructions.items.len;

        var visit_queue = std.ArrayList(*IRNode).init(allocator);
        defer visit_queue.deinit();
        try visit_queue.append(func.ir_root);

        var visited = std.AutoHashMap(*IRNode, void).init(allocator);
        defer visited.deinit();

        while (visit_queue.items.len > 0) {
            const node: *IRNode = visit_queue.orderedRemove(0); // visit the graph in breadth-first order (FIFO queue)

            // only emit an instruction once all its out edges have been visited - this ensures all dependent instructions
            // will be executed after this one
            var all_out_edges_visited: bool = true;
            for (node.edgesOut()) |output_node| {
                if (visited.contains(output_node) == false) {
                    all_out_edges_visited = false;
                    break;
                }
            }

            if (all_out_edges_visited) {
                try visited.put(node, {});

                // Fix: the error union returned by append() was silently
                // discarded before; propagate OutOfMemory to the caller.
                try instructions.append(RegInstruction{
                    .registerSlotOffset = if (func.register_map.get(node)) |slot_index| slot_index else 0,
                    .opcode = node.opcode,
                    .immediate = node.instruction(module_def).?.immediate,
                });
            }

            for (node.edgesIn()) |input_node| {
                if (!visited.contains(input_node)) { // TODO do we need this?
                    try visit_queue.append(input_node);
                }
            }
        }

        const end_instruction_offset = instructions.items.len;
        const emitted_instructions = instructions.items[start_instruction_offset..end_instruction_offset];

        // Emission order was root-first; execution order is the reverse.
        std.mem.reverse(RegInstruction, emitted_instructions);
    }

    // Writes a graphviz .dot dump of the function's IR graph to `path`.
    fn dumpVizGraph(func: IRFunction, path: []u8, module_def: ModuleDefinition, allocator: std.mem.Allocator) !void {
        var graph_txt = std.ArrayList(u8).init(allocator);
        defer graph_txt.deinit();
        try graph_txt.ensureTotalCapacity(1024 * 16);

        const writer = graph_txt.writer();
        _ = try writer.write("digraph {\n");

        var nodes = std.ArrayList(*const IRNode).init(allocator);
        defer nodes.deinit();
        try nodes.ensureTotalCapacity(1024);
        nodes.appendAssumeCapacity(func.ir_root);

        var visited = std.AutoHashMap(*IRNode, void).init(allocator);
        defer visited.deinit();
        try visited.put(func.ir_root, {});

        while (nodes.items.len > 0) {
            const n: *const IRNode = nodes.pop();
            const opcode: Opcode = n.opcode;
            const instruction = n.instruction(module_def);

            var label_buffer: [256]u8 = undefined;
            const label = switch (opcode) {
                .I32_Const => std.fmt.bufPrint(&label_buffer, ": {}", .{instruction.?.immediate.ValueI32}) catch unreachable,
                .I64_Const => std.fmt.bufPrint(&label_buffer, ": {}", .{instruction.?.immediate.ValueI64}) catch unreachable,
                .F32_Const => std.fmt.bufPrint(&label_buffer, ": {}", .{instruction.?.immediate.ValueF32}) catch unreachable,
                .F64_Const => std.fmt.bufPrint(&label_buffer, ": {}", .{instruction.?.immediate.ValueF64}) catch unreachable,
                .Call => std.fmt.bufPrint(&label_buffer, ": func {}", .{instruction.?.immediate.Index}) catch unreachable,
                .Local_Get, .Local_Set, .Local_Tee => std.fmt.bufPrint(&label_buffer, ": {}", .{instruction.?.immediate.Index}) catch unreachable,
                else => &[0]u8{},
            };

            var register_buffer: [64]u8 = undefined;
            const register = blk: {
                if (func.register_map.get(n)) |slot| {
                    break :blk std.fmt.bufPrint(&register_buffer, " @reg {}", .{slot}) catch unreachable;
                } else {
                    break :blk &[0]u8{};
                }
            };

            try writer.print("\"{*}\" [label=\"{}{s}{s}\"]\n", .{ n, opcode, label, register });

            for (n.edgesOut()) |e| {
                try writer.print("\"{*}\" -> \"{*}\"\n", .{ n, e });

                if (!visited.contains(e)) {
                    try nodes.append(e);
                    try visited.put(e, {});
                }
            }

            for (n.edgesIn()) |e| {
                if (!visited.contains(e)) {
                    try nodes.append(e);
                    try visited.put(e, {});
                }
            }
        }

        _ = try writer.write("}\n");

        // Zig 0.13 (pinned by CI) takes a WriteFileOptions struct here.
        try std.fs.cwd().writeFile(.{ .sub_path = path, .data = graph_txt.items });
    }
};
486 |
487 | const ModuleIR = struct {
488 | const BlockStack = struct {
489 | const Block = struct {
490 | node_start_index: u32,
491 | continuation: u32, // in instruction index space
492 | phi_nodes: []*IRNode,
493 | };
494 |
495 | nodes: std.ArrayList(*IRNode),
496 | blocks: std.ArrayList(Block),
497 | phi_nodes: std.ArrayList(*IRNode),
498 |
499 | // const ContinuationType = enum {
500 | // .Normal,
501 | // .Loop,
502 | // };
503 |
504 | fn init(allocator: std.mem.Allocator) BlockStack {
505 | return BlockStack{
506 | .nodes = std.ArrayList(*IRNode).init(allocator),
507 | .blocks = std.ArrayList(Block).init(allocator),
508 | .phi_nodes = std.ArrayList(*IRNode).init(allocator),
509 | };
510 | }
511 |
512 | fn deinit(self: BlockStack) void {
513 | self.nodes.deinit();
514 | self.blocks.deinit();
515 | }
516 |
517 | fn pushBlock(self: *BlockStack, continuation: u32) AllocError!void {
518 | try self.blocks.append(Block{
519 | .node_start_index = @intCast(self.nodes.items.len),
520 | .continuation = continuation,
521 | .phi_nodes = &[_]*IRNode{},
522 | });
523 | }
524 |
525 | fn pushBlockWithPhi(self: *BlockStack, continuation: u32, phi_nodes: []*IRNode) AllocError!void {
526 | const start_slice_index = self.phi_nodes.items.len;
527 | try self.phi_nodes.appendSlice(phi_nodes);
528 |
529 | try self.blocks.append(Block{
530 | .node_start_index = @intCast(self.nodes.items.len),
531 | .continuation = continuation,
532 | .phi_nodes = self.phi_nodes.items[start_slice_index..],
533 | });
534 | }
535 |
536 | fn pushNode(self: *BlockStack, node: *IRNode) AllocError!void {
537 | try self.nodes.append(node);
538 | }
539 |
540 | fn popBlock(self: *BlockStack) void {
541 | const block: Block = self.blocks.pop();
542 |
543 | std.debug.assert(block.node_start_index <= self.nodes.items.len);
544 |
545 | // should never grow these arrays
546 | self.nodes.resize(block.node_start_index) catch unreachable;
547 | self.phi_nodes.resize(self.phi_nodes.items.len - block.phi_nodes.len) catch unreachable;
548 | }
549 |
550 | fn currentBlockNodes(self: *BlockStack) []*IRNode {
551 | // std.debug.print(">>>>>>>> num block: {}\n", .{self.blocks.items.len});
552 | const index: u32 = self.blocks.items[self.blocks.items.len - 1].node_start_index;
553 | return self.nodes.items[index..];
554 | }
555 |
556 | fn reset(self: *BlockStack) void {
557 | self.nodes.clearRetainingCapacity();
558 | self.blocks.clearRetainingCapacity();
559 | }
560 | };
561 |
    // Scratch state reused while lowering one function's bytecode to IR.
    const IntermediateCompileData = struct {
        // Collapses value-identical constants into a single IR node.
        const UniqueValueToIRNodeMap = std.HashMap(TaggedVal, *IRNode, TaggedVal.HashMapContext, std.hash_map.default_max_load_percentage);

        // A branch-like node whose edge to its continuation target can only be
        // wired once the target instruction has been reached.
        const PendingContinuationEdge = struct {
            continuation: u32,
            node: *IRNode,
        };

        allocator: std.mem.Allocator,

        // all_nodes: std.ArrayList(*IRNode),

        blocks: BlockStack,

        // This stack is a record of the nodes to push values onto the stack. If an instruction would push
        // multiple values onto the stack, it would be in this list as many times as values it pushed. Note
        // that we don't have to do any type checking here because the module has already been validated.
        value_stack: std.ArrayList(*IRNode),

        // records the current block continuation
        // label_continuations: std.ArrayList(u32),

        pending_continuation_edges: std.ArrayList(PendingContinuationEdge),

        // when hitting an unconditional control transfer, we need to mark the rest of the stack values as unreachable just like in validation
        is_unreachable: bool,

        // This is a bit weird - since the Local_* instructions serve to just manipulate the locals into the stack,
        // we need a way to represent what's in the locals slot as an SSA node. This array lets us do that. We also
        // reuse the Local_Get instructions to indicate the "initial value" of the slot. Since our IRNode only stores
        // indices to instructions, we'll just lazily set these when they're fetched for the first time.
        locals: std.ArrayList(?*IRNode),

        // Lets us collapse multiple const IR nodes with the same type/value into a single one
        unique_constants: UniqueValueToIRNodeMap,

        // Reusable scratch space for graph walks; cleared between uses.
        scratch_node_list_1: std.ArrayList(*IRNode),
        scratch_node_list_2: std.ArrayList(*IRNode),
600 |
601 | fn init(allocator: std.mem.Allocator) IntermediateCompileData {
602 | return IntermediateCompileData{
603 | .allocator = allocator,
604 | // .all_nodes = std.ArrayList(*IRNode).init(allocator),
605 | .blocks = BlockStack.init(allocator),
606 | .value_stack = std.ArrayList(*IRNode).init(allocator),
607 | // .label_continuations = std.ArrayList(u32).init(allocator),
608 | .pending_continuation_edges = std.ArrayList(PendingContinuationEdge).init(allocator),
609 | .is_unreachable = false,
610 | .locals = std.ArrayList(?*IRNode).init(allocator),
611 | .unique_constants = UniqueValueToIRNodeMap.init(allocator),
612 | .scratch_node_list_1 = std.ArrayList(*IRNode).init(allocator),
613 | .scratch_node_list_2 = std.ArrayList(*IRNode).init(allocator),
614 | };
615 | }
616 |
617 | fn warmup(self: *IntermediateCompileData, func_def: FunctionDefinition, module_def: ModuleDefinition) AllocError!void {
618 | try self.locals.appendNTimes(null, func_def.numParamsAndLocals(module_def));
619 | try self.scratch_node_list_1.ensureTotalCapacity(4096);
620 | try self.scratch_node_list_2.ensureTotalCapacity(4096);
621 | // try self.label_continuations.append(func_def.continuation);
622 | self.is_unreachable = false;
623 | }
624 |
        // Clears all per-function state while retaining allocations for reuse
        // across functions.
        fn reset(self: *IntermediateCompileData) void {
            // self.all_nodes.clearRetainingCapacity();
            self.blocks.reset();
            self.value_stack.clearRetainingCapacity();
            // self.label_continuations.clearRetainingCapacity();
            self.pending_continuation_edges.clearRetainingCapacity();
            self.locals.clearRetainingCapacity();
            self.unique_constants.clearRetainingCapacity();
            self.scratch_node_list_1.clearRetainingCapacity();
            self.scratch_node_list_2.clearRetainingCapacity();
        }
636 |
        // Frees all owned containers. IRNode allocations live in ModuleIR's
        // pool and are not owned here.
        fn deinit(self: *IntermediateCompileData) void {
            // self.all_nodes.deinit();
            self.blocks.deinit();
            self.value_stack.deinit();
            // self.label_continuations.deinit();
            self.pending_continuation_edges.deinit();
            self.locals.deinit();
            self.unique_constants.deinit();
            self.scratch_node_list_1.deinit();
            self.scratch_node_list_2.deinit();
        }
648 |
649 | fn popPushValueStackNodes(self: *IntermediateCompileData, node: *IRNode, num_consumed: usize, num_pushed: usize) AllocError!void { // wires the value-stack operands of one instruction into the IR graph
650 | if (self.is_unreachable) { // dead code: skip stack bookkeeping entirely
651 | return;
652 | }
653 |
654 | var edges_buffer: [8]*IRNode = undefined; // 8 should be more stack slots than any one instruction can pop
655 | std.debug.assert(num_consumed <= edges_buffer.len);
656 |
657 | const edges = edges_buffer[0..num_consumed]; // edges[0] receives the top of the value stack
658 | for (edges) |*e| {
659 | e.* = self.value_stack.pop();
660 | }
661 | try node.pushEdges(.In, edges, self.allocator); // consumed values become this node's inputs
662 | for (edges) |e| {
663 | var consumer_edges = [_]*IRNode{node};
664 | try e.pushEdges(.Out, &consumer_edges, self.allocator); // and this node becomes an output of each producer
665 | }
666 | try self.value_stack.appendNTimes(node, num_pushed); // the node itself stands in for all values it pushes
667 | }
668 |
669 | fn foldConstant(self: *IntermediateCompileData, mir: *ModuleIR, comptime valtype: ValType, instruction_index: u32, instruction: Instruction) AllocError!*IRNode { // dedupes const instructions: one IR node per unique (type, value) pair
670 | var val: TaggedVal = undefined;
671 | val.type = valtype;
672 | val.val = switch (valtype) {
673 | .I32 => Val{ .I32 = instruction.immediate.ValueI32 },
674 | .I64 => Val{ .I64 = instruction.immediate.ValueI64 },
675 | .F32 => Val{ .F32 = instruction.immediate.ValueF32 },
676 | .F64 => Val{ .F64 = instruction.immediate.ValueF64 },
677 | .V128 => Val{ .V128 = instruction.immediate.ValueVec },
678 | else => @compileError("Unsupported const instruction"), // comptime-checked: callers may only pass value types
679 | };
680 |
681 | const res = try self.unique_constants.getOrPut(val);
682 | if (res.found_existing == false) { // first sighting of this constant: create its node
683 | const node = try IRNode.createWithInstruction(mir, instruction_index);
684 | res.value_ptr.* = node;
685 | }
686 | if (self.is_unreachable == false) { // only track the stack effect in reachable code
687 | try self.value_stack.append(res.value_ptr.*);
688 | }
689 | return res.value_ptr.*;
690 | }
691 |
692 | fn addPendingEdgeLabel(self: *IntermediateCompileData, node: *IRNode, label_id: u32) !void { // label_id is a relative block depth (0 == innermost block)
693 | const last_block_index = self.blocks.blocks.items.len - 1;
694 | const continuation: u32 = self.blocks.blocks.items[last_block_index - label_id].continuation; // translate relative label to an absolute continuation index
695 | try self.pending_continuation_edges.append(PendingContinuationEdge{ // resolved into a real edge once the continuation instruction is compiled
696 | .node = node,
697 | .continuation = continuation,
698 | });
699 | }
700 |
701 | fn addPendingEdgeContinuation(self: *IntermediateCompileData, node: *IRNode, continuation: u32) !void { // like addPendingEdgeLabel, but the caller already has the absolute continuation index
702 | try self.pending_continuation_edges.append(PendingContinuationEdge{
703 | .node = node,
704 | .continuation = continuation,
705 | });
706 | }
707 | };
708 |
709 | allocator: std.mem.Allocator,
710 | module_def: *const ModuleDefinition,
711 | functions: std.ArrayList(IRFunction),
712 | ir: StableArray(IRNode),
713 |
714 | // instructions: std.ArrayList(RegInstruction),
715 |
716 | fn init(allocator: std.mem.Allocator, module_def: *const ModuleDefinition) ModuleIR { // module_def must outlive the returned ModuleIR
717 | return ModuleIR{
718 | .allocator = allocator,
719 | .module_def = module_def,
720 | .functions = std.ArrayList(IRFunction).init(allocator),
721 | .ir = StableArray(IRNode).init(1024 * 1024 * 8), // 8 MB reservation; StableArray keeps IRNode pointers stable as it grows
722 | };
723 | }
724 |
725 | fn deinit(mir: *ModuleIR) void { // frees per-function data, then per-node allocations, then the node storage itself
726 | for (mir.functions.items) |*func| {
727 | func.deinit();
728 | }
729 | mir.functions.deinit();
730 | for (mir.ir.items) |node| {
731 | node.deinit(mir.allocator); // edges allocated via mir.allocator in pushEdges
732 | }
733 | mir.ir.deinit();
734 | }
735 |
736 | fn compile(mir: *ModuleIR) AllocError!void { // compiles every function in the module, reusing one scratch IntermediateCompileData between them
737 | var compile_data = IntermediateCompileData.init(mir.allocator);
738 | defer compile_data.deinit();
739 |
740 | for (0..mir.module_def.functions.items.len) |i| {
741 | std.debug.print("mir.module_def.functions.items.len: {}, i: {}\n\n", .{ mir.module_def.functions.items.len, i }); // NOTE(review): debug print left in — consider removing before shipping
742 | try mir.compileFunc(i, &compile_data);
743 |
744 | compile_data.reset(); // keep allocations, clear state for the next function
745 | }
746 | }
747 |
748 | fn compileFunc(mir: *ModuleIR, index: usize, compile_data: *IntermediateCompileData) AllocError!void { // builds the IR graph for one function by walking its wasm instructions in order
749 | const UniqueValueToIRNodeMap = std.HashMap(TaggedVal, *IRNode, TaggedVal.HashMapContext, std.hash_map.default_max_load_percentage);
750 |
751 | const Helpers = struct {
752 | fn opcodeHasDefaultIRMapping(opcode: Opcode) bool { // opcodes listed here get special-cased node creation (or none) in the switch below
753 | return switch (opcode) {
754 | .Noop,
755 | .Block,
756 | .Loop,
757 | .End,
758 | .Drop,
759 | .I32_Const,
760 | .I64_Const,
761 | .F32_Const,
762 | .F64_Const,
763 | .Local_Get,
764 | .Local_Set,
765 | .Local_Tee,
766 | => false,
767 | else => true,
768 | };
769 | }
770 | };
771 |
772 | const func: *const FunctionDefinition = &mir.module_def.functions.items[index];
773 | const func_type: *const FunctionTypeDefinition = func.typeDefinition(mir.module_def.*);
774 |
775 | std.debug.print("compiling func index {}\n", .{index}); // NOTE(review): debug print left in
776 |
777 | try compile_data.warmup(func.*, mir.module_def.*);
778 |
779 | try compile_data.blocks.pushBlock(func.continuation); // implicit function-level block
780 |
781 | var locals = compile_data.locals.items; // for convenience later
782 |
783 | // Lets us collapse multiple const IR nodes with the same type/value into a single one
784 | var unique_constants = UniqueValueToIRNodeMap.init(mir.allocator); // NOTE(review): appears unused — foldConstant uses compile_data.unique_constants instead; candidate for removal
785 | defer unique_constants.deinit();
786 |
787 | const instructions: []Instruction = func.instructions(mir.module_def.*);
788 | if (instructions.len == 0) {
789 | std.log.warn("Skipping function with no instructions (index {}).", .{index});
790 | return;
791 | }
792 |
793 | var ir_root: ?*IRNode = null; // set when the Return node is seen; required below
794 |
795 | for (instructions, 0..) |instruction, local_instruction_index| {
796 | const instruction_index: u32 = @intCast(func.instructions_begin + local_instruction_index);
797 |
798 | var node: ?*IRNode = null;
799 | if (Helpers.opcodeHasDefaultIRMapping(instruction.opcode)) {
800 | node = try IRNode.createWithInstruction(mir, instruction_index);
801 | }
802 |
803 | std.debug.print("opcode: {}\n", .{instruction.opcode}); // NOTE(review): debug print left in
804 |
805 | switch (instruction.opcode) {
806 | // .Loop => {
807 | // instruction.
808 | // },
809 | // .If => {},
810 | .Block => {
811 | // compile_data.label_stack += 1;
812 |
813 | // try compile_data.label_stack.append(node);
814 | // try compile_data.label_continuations.append(instruction.immediate.Block.continuation);
815 | try compile_data.blocks.pushBlock(instruction.immediate.Block.continuation);
816 | },
817 | .Loop => {
818 | // compile_data.label_stack += 1;
819 | // compile_data.label_stack.append(node);
820 | // try compile_data.label_continuations.append(instruction.immediate.Block.continuation);
821 | try compile_data.blocks.pushBlock(instruction.immediate.Block.continuation); // TODO record the kind of block so we know this is a loop?
822 | },
823 | .If => {
824 | var phi_nodes: *std.ArrayList(*IRNode) = &compile_data.scratch_node_list_1;
825 | defer compile_data.scratch_node_list_1.clearRetainingCapacity();
826 |
827 | std.debug.assert(phi_nodes.items.len == 0);
828 |
829 | for (0..instruction.immediate.If.num_returns) |_| {
830 | try phi_nodes.append(try IRNode.createPhi(mir));
831 | }
832 |
833 | try compile_data.blocks.pushBlockWithPhi(instruction.immediate.If.end_continuation, phi_nodes.items[0..]);
834 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.end_continuation + 1);
835 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.else_continuation);
836 |
837 | try compile_data.popPushValueStackNodes(node.?, 1, 0);
838 |
839 | // after the if consumes the value it needs, push the phi nodes on since these will be the return values
840 | // of the block
841 | try compile_data.value_stack.appendSlice(phi_nodes.items);
842 | },
843 | .IfNoElse => {
844 | try compile_data.blocks.pushBlock(instruction.immediate.If.end_continuation);
845 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.end_continuation + 1);
846 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.else_continuation);
847 | try compile_data.popPushValueStackNodes(node.?, 1, 0);
848 |
849 | // TODO figure out if there needs to be any phi nodes and if so what two inputs they have
850 | },
851 | .Else => {
852 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.end_continuation + 1);
853 | try compile_data.addPendingEdgeContinuation(node.?, instruction.immediate.If.else_continuation);
854 |
855 | // TODO hook up the phi nodes with the stuffs
856 | },
857 | .End => {
858 | // TODO finish up anything with phi nodes?
859 |
860 | // the last End opcode returns the values on the stack
861 | // if (compile_data.label_continuations.items.len == 1) {
862 | if (compile_data.blocks.blocks.items.len == 1) {
863 | node = try IRNode.createStandalone(mir, .Return);
864 | try compile_data.popPushValueStackNodes(node.?, func_type.getReturns().len, 0);
865 | // _ = compile_data.label_continuations.pop();
866 | }
867 |
868 | // At the end of every block, we ensure all nodes with side effects are still in the graph. Order matters
869 | // since mutations to the Store or control flow changes must happen in the order of the original instructions.
870 | {
871 | var nodes_with_side_effects: *std.ArrayList(*IRNode) = &compile_data.scratch_node_list_1;
872 | defer nodes_with_side_effects.clearRetainingCapacity();
873 |
874 | const current_block_nodes: []*IRNode = compile_data.blocks.currentBlockNodes();
875 |
876 | for (current_block_nodes) |block_node| {
877 | if (block_node.hasSideEffects() or block_node.isFlowControl()) {
878 | try nodes_with_side_effects.append(block_node);
879 | }
880 | }
881 |
882 | if (nodes_with_side_effects.items.len >= 2) {
883 | var i: i32 = @intCast(nodes_with_side_effects.items.len - 2); // walk backward, chaining each island to its successor
884 | while (i >= 0) : (i -= 1) {
885 | const ii: u32 = @intCast(i);
886 | var node_a: *IRNode = nodes_with_side_effects.items[ii];
887 | if (try node_a.isIsland(&compile_data.scratch_node_list_2)) {
888 | var node_b: *IRNode = nodes_with_side_effects.items[ii + 1];
889 |
890 | var in_edges = [_]*IRNode{node_b};
891 | try node_a.pushEdges(.Out, &in_edges, compile_data.allocator);
892 |
893 | var out_edges = [_]*IRNode{node_a};
894 | try node_b.pushEdges(.In, &out_edges, compile_data.allocator);
895 | }
896 | }
897 | }
898 | }
899 |
900 | compile_data.blocks.popBlock();
901 | },
902 | .Branch => {
903 | try compile_data.addPendingEdgeLabel(node.?, instruction.immediate.LabelId);
904 | compile_data.is_unreachable = true; // everything until the next End/Else is dead
905 | },
906 | .Branch_If => {
907 | try compile_data.popPushValueStackNodes(node.?, 1, 0);
908 | },
909 | .Branch_Table => {
910 | assert(node != null);
911 |
912 | try compile_data.popPushValueStackNodes(node.?, 1, 0);
913 |
914 | // var continuation_edges: std.ArrayList(*IRNode).init(allocator);
915 | // defer continuation_edges.deinit();
916 |
917 | const immediates: *const BranchTableImmediates = &mir.module_def.code.branch_table.items[instruction.immediate.Index];
918 |
919 | try compile_data.addPendingEdgeLabel(node.?, immediates.fallback_id);
920 | for (immediates.label_ids.items) |continuation| {
921 | try compile_data.addPendingEdgeLabel(node.?, continuation);
922 | }
923 |
924 | compile_data.is_unreachable = true;
925 |
926 | // try label_ids.append(immediates.fallback_id);
927 | // try label_ids.appendSlice(immediates.label_ids.items);
928 |
929 | // node.pushEdges(.Out, )
930 | // TODO need to somehow connect to the various labels it wants to jump to?
931 | },
932 | .Return => {
933 | try compile_data.popPushValueStackNodes(node.?, func_type.getReturns().len, 0);
934 | compile_data.is_unreachable = true;
935 | },
936 | .Call => {
937 | const calling_func_def: *const FunctionDefinition = &mir.module_def.functions.items[index]; // NOTE(review): uses the *current* function's type, not the callee's — confirm this is intended
938 | const calling_func_type: *const FunctionTypeDefinition = calling_func_def.typeDefinition(mir.module_def.*);
939 | const num_returns: usize = calling_func_type.getReturns().len;
940 | const num_params: usize = calling_func_type.getParams().len;
941 |
942 | try compile_data.popPushValueStackNodes(node.?, num_params, num_returns);
943 | },
944 | // .Call_Indirect
945 | .Drop => {
946 | if (compile_data.is_unreachable == false) {
947 | _ = compile_data.value_stack.pop();
948 | }
949 | },
950 | .I32_Const => {
951 | assert(node == null);
952 | node = try compile_data.foldConstant(mir, .I32, instruction_index, instruction);
953 | },
954 | .I64_Const => {
955 | assert(node == null);
956 | node = try compile_data.foldConstant(mir, .I64, instruction_index, instruction);
957 | },
958 | .F32_Const => {
959 | assert(node == null);
960 | node = try compile_data.foldConstant(mir, .F32, instruction_index, instruction);
961 | },
962 | .F64_Const => {
963 | assert(node == null);
964 | node = try compile_data.foldConstant(mir, .F64, instruction_index, instruction);
965 | },
966 | .I32_Eq,
967 | .I32_NE,
968 | .I32_LT_S,
969 | .I32_LT_U,
970 | .I32_GT_S,
971 | .I32_GT_U,
972 | .I32_LE_S,
973 | .I32_LE_U,
974 | .I32_GE_S,
975 | .I32_GE_U,
976 | .I32_Add,
977 | .I32_Sub,
978 | .I32_Mul,
979 | .I32_Div_S,
980 | .I32_Div_U,
981 | .I32_Rem_S,
982 | .I32_Rem_U,
983 | .I32_And,
984 | .I32_Or,
985 | .I32_Xor,
986 | .I32_Shl,
987 | .I32_Shr_S,
988 | .I32_Shr_U,
989 | .I32_Rotl,
990 | .I32_Rotr,
991 | // TODO add a lot more of these simpler opcodes
992 | => {
993 | try compile_data.popPushValueStackNodes(node.?, 2, 1); // binary ops: 2 operands in, 1 result out
994 | },
995 | .I32_Eqz,
996 | .I32_Clz,
997 | .I32_Ctz,
998 | .I32_Popcnt,
999 | .I32_Extend8_S,
1000 | .I32_Extend16_S,
1001 | .I64_Clz,
1002 | .I64_Ctz,
1003 | .I64_Popcnt,
1004 | .F32_Neg,
1005 | .F64_Neg,
1006 | => {
1007 | try compile_data.popPushValueStackNodes(node.?, 1, 1); // unary ops: 1 operand in, 1 result out
1008 | },
1009 | .Local_Get => {
1010 | assert(node == null);
1011 |
1012 | if (compile_data.is_unreachable == false) {
1013 | const local: *?*IRNode = &locals[instruction.immediate.Index];
1014 | if (local.* == null) { // lazily create one node per local; later gets reuse it
1015 | local.* = try IRNode.createWithInstruction(mir, instruction_index);
1016 | }
1017 | node = local.*;
1018 | try compile_data.value_stack.append(node.?);
1019 | }
1020 | },
1021 | .Local_Set => {
1022 | assert(node == null);
1023 |
1024 | if (compile_data.is_unreachable == false) {
1025 | const n: *IRNode = compile_data.value_stack.pop(); // the producing node becomes the local's current value
1026 | locals[instruction.immediate.Index] = n;
1027 | }
1028 | },
1029 | .Local_Tee => {
1030 | assert(node == null);
1031 | if (compile_data.is_unreachable == false) {
1032 | const n: *IRNode = compile_data.value_stack.items[compile_data.value_stack.items.len - 1]; // like Local_Set but leaves the value on the stack
1033 | locals[instruction.immediate.Index] = n;
1034 | }
1035 | },
1036 | else => {
1037 | std.log.warn("skipping node {}", .{instruction.opcode});
1038 | },
1039 | }
1040 |
1041 | // resolve any pending continuations with the current node.
1042 | if (node) |current_node| {
1043 | var i: usize = 0;
1044 | while (i < compile_data.pending_continuation_edges.items.len) {
1045 | var pending: *IntermediateCompileData.PendingContinuationEdge = &compile_data.pending_continuation_edges.items[i];
1046 |
1047 | if (pending.continuation == instruction_index) {
1048 | var out_edges = [_]*IRNode{current_node};
1049 | try pending.node.pushEdges(.Out, &out_edges, compile_data.allocator);
1050 |
1051 | var in_edges = [_]*IRNode{pending.node};
1052 | try current_node.pushEdges(.In, &in_edges, compile_data.allocator);
1053 |
1054 | _ = compile_data.pending_continuation_edges.swapRemove(i); // swapRemove: order of pending list doesn't matter
1055 | } else {
1056 | i += 1;
1057 | }
1058 | }
1059 |
1060 | // try compile_data.all_nodes.append(current_node);
1061 |
1062 | try compile_data.blocks.pushNode(current_node);
1063 | }
1064 |
1065 | // TODO don't assume only one return node - there can be multiple in real functions
1066 | if (node) |n| {
1067 | if (n.opcode == .Return) {
1068 | std.debug.assert(ir_root == null);
1069 | ir_root = node;
1070 | }
1071 | }
1072 | }
1073 |
1074 | // resolve any nodes that have side effects that somehow became isolated
1075 | // TODO will have to stress test this with a bunch of different cases of nodes
1076 | // for (compile_data.all_nodes.items[0 .. compile_data.all_nodes.items.len - 1]) |node| {
1077 | // if (node.hasSideEffects()) {
1078 | // if (try node.isIsland(&compile_data.scratch_node_list_1)) {
1079 | // var last_node: *IRNode = compile_data.all_nodes.items[compile_data.all_nodes.items.len - 1];
1080 |
1081 | // var out_edges = [_]*IRNode{last_node};
1082 | // try node.pushEdges(.Out, &out_edges, compile_data.allocator);
1083 |
1084 | // var in_edges = [_]*IRNode{node};
1085 | // try last_node.pushEdges(.In, &in_edges, compile_data.allocator);
1086 | // }
1087 | // }
1088 | // }
1089 |
1090 | try mir.functions.append(IRFunction.init(
1091 | @intCast(index),
1092 | ir_root.?, // asserts a Return node was found (see TODO above about multiple returns)
1093 | mir.allocator,
1094 | ));
1095 |
1096 | try mir.functions.items[mir.functions.items.len - 1].regalloc(mir.allocator);
1097 | }
1098 | };
1099 |
1100 | pub const RegisterVM = struct { // register-based VM backend; all runtime entry points below are currently unimplemented stubs
1101 | pub fn init(vm: *VM) void {
1102 | _ = vm;
1103 | }
1104 |
1105 | pub fn deinit(vm: *VM) void {
1106 | _ = vm;
1107 | }
1108 |
1109 | pub fn instantiate(vm: *VM, module: *ModuleInstance, opts: ModuleInstantiateOpts) anyerror!void {
1110 | _ = vm;
1111 | _ = module;
1112 | _ = opts;
1113 | return error.Unimplemented;
1114 | }
1115 |
1116 | pub fn invoke(vm: *VM, module: *ModuleInstance, handle: FunctionHandle, params: [*]const Val, returns: [*]Val, opts: InvokeOpts) anyerror!void {
1117 | _ = vm;
1118 | _ = module;
1119 | _ = handle;
1120 | _ = params;
1121 | _ = returns;
1122 | _ = opts;
1123 | return error.Unimplemented;
1124 | }
1125 |
1126 | pub fn invokeWithIndex(vm: *VM, module: *ModuleInstance, func_index: usize, params: [*]const Val, returns: [*]Val) anyerror!void {
1127 | _ = vm;
1128 | _ = module;
1129 | _ = func_index;
1130 | _ = params;
1131 | _ = returns;
1132 | return error.Unimplemented;
1133 | }
1134 |
1135 | pub fn resumeInvoke(vm: *VM, module: *ModuleInstance, returns: []Val, opts: ResumeInvokeOpts) anyerror!void {
1136 | _ = vm;
1137 | _ = module;
1138 | _ = returns;
1139 | _ = opts;
1140 | return error.Unimplemented;
1141 | }
1142 |
1143 | pub fn step(vm: *VM, module: *ModuleInstance, returns: []Val) anyerror!void {
1144 | _ = vm;
1145 | _ = module;
1146 | _ = returns;
1147 | return error.Unimplemented;
1148 | }
1149 |
1150 | pub fn setDebugTrap(vm: *VM, module: *ModuleInstance, wasm_address: u32, mode: DebugTrapInstructionMode) anyerror!bool {
1151 | _ = vm;
1152 | _ = module;
1153 | _ = wasm_address;
1154 | _ = mode;
1155 | return error.Unimplemented;
1156 | }
1157 |
1158 | pub fn formatBacktrace(vm: *VM, indent: u8, allocator: std.mem.Allocator) anyerror!std.ArrayList(u8) {
1159 | _ = vm;
1160 | _ = indent;
1161 | _ = allocator;
1162 | return error.Unimplemented;
1163 | }
1164 |
1165 | pub fn findFuncTypeDef(vm: *VM, module: *ModuleInstance, local_func_index: usize) *const FunctionTypeDefinition {
1166 | _ = vm;
1167 | _ = module;
1168 | _ = local_func_index;
1169 | return &dummy_func_type_def; // placeholder result; see dummy_func_type_def below
1170 | }
1171 |
1172 | pub fn compile(vm: *RegisterVM, module_def: ModuleDefinition) AllocError!void { // the only implemented path: builds the IR for every function
1173 | var mir = ModuleIR.init(vm.allocator, module_def); // NOTE(review): RegisterVM declares no 'allocator' field, and ModuleIR.init takes *const ModuleDefinition while module_def is by-value — confirm this compiles once referenced
1174 | defer mir.deinit();
1175 |
1176 | try mir.compile();
1177 |
1178 | // wasm bytecode -> IR graph -> register-assigned IR graph ->
1179 | }
1180 | };
1181 |
1182 | const dummy_func_type_def = FunctionTypeDefinition{ // placeholder returned by RegisterVM.findFuncTypeDef stub
1183 | .types = undefined, // NOTE(review): left undefined — callers must not read .types until this stub is replaced
1184 | .num_params = 0,
1185 | };
1186 |
1187 | // register instructions get a slice of the overall set of register slots, which are pointers to actual
1188 | // registers (?)
1189 |
1190 | const RegInstruction = struct { // one instruction in the register-based encoding; registers() maps it onto the function's register slot space
1191 | registerSlotOffset: u32, // offset within the function register slot space to start
1192 | opcode: Opcode,
1193 | immediate: def.InstructionImmediates,
1194 |
1195 | fn numRegisters(self: RegInstruction) u4 { // TODO: per-opcode register counts; the empty switch is a placeholder
1196 | switch (self.opcode) {}
1197 | }
1198 |
1199 | fn registers(self: RegInstruction, register_slice: []Val) []Val { // view of this instruction's registers within the function's slice
1200 | return register_slice[self.registerSlotOffset .. self.registerSlotOffset + self.numRegisters()]; // fix: field is registerSlotOffset, not registerOffset
1201 | }
1202 | };
1203 |
1204 | fn runTestWithViz(wasm_filepath: []const u8, viz_dir: []const u8) !void { // test helper: decode a wasm file, compile its IR, and dump one viz graph per function into viz_dir
1205 | var allocator = std.testing.allocator;
1206 |
1207 | var cwd = std.fs.cwd();
1208 | const wasm_data: []u8 = try cwd.readFileAlloc(allocator, wasm_filepath, 1024 * 1024 * 128); // 128 MB read cap
1209 | defer allocator.free(wasm_data);
1210 |
1211 | const module_def_opts = def.ModuleDefinitionOpts{
1212 | .debug_name = std.fs.path.basename(wasm_filepath),
1213 | };
1214 | var module_def = try ModuleDefinition.create(allocator, module_def_opts);
1215 | defer module_def.destroy();
1216 |
1217 | try module_def.decode(wasm_data);
1218 |
1219 | var mir = ModuleIR.init(allocator, module_def);
1220 | defer mir.deinit();
1221 | try mir.compile();
1222 | for (mir.functions.items, 0..) |func, i| {
1223 | var viz_path_buffer: [256]u8 = undefined;
1224 | const viz_path = std.fmt.bufPrint(&viz_path_buffer, "{s}\\viz_{}.txt", .{ viz_dir, i }) catch unreachable; // NOTE(review): '\\' separator is Windows-only — confirm intended platforms
1225 | std.debug.print("gen graph for func {}\n", .{i});
1226 | try func.dumpVizGraph(viz_path, module_def.*, std.testing.allocator);
1227 | }
1228 | }
1229 |
1230 | // test "ir1" {
1231 | // const filename =
1232 | // // \\E:\Dev\zig_projects\bytebox\test\wasm\br_table\br_table.0.wasm
1233 | // \\E:\Dev\zig_projects\bytebox\test\wasm\return\return.0.wasm
1234 | // // \\E:\Dev\third_party\zware\test\fact.wasm
1235 | // // \\E:\Dev\zig_projects\bytebox\test\wasm\i32\i32.0.wasm
1236 | // ;
1237 | // const viz_dir =
1238 | // \\E:\Dev\zig_projects\bytebox\viz
1239 | // ;
1240 | // try runTestWithViz(filename, viz_dir);
1241 |
1242 | // // var allocator = std.testing.allocator;
1243 |
1244 | // // var cwd = std.fs.cwd();
1245 | // // var wasm_data: []u8 = try cwd.readFileAlloc(allocator, filename, 1024 * 1024 * 128);
1246 | // // defer allocator.free(wasm_data);
1247 |
1248 | // // const module_def_opts = def.ModuleDefinitionOpts{
1249 | // // .debug_name = std.fs.path.basename(filename),
1250 | // // };
1251 | // // var module_def = ModuleDefinition.init(allocator, module_def_opts);
1252 | // // defer module_def.deinit();
1253 |
1254 | // // try module_def.decode(wasm_data);
1255 |
1256 | // // var mir = ModuleIR.init(allocator, &module_def);
1257 | // // defer mir.deinit();
1258 | // // try mir.compile();
1259 | // // for (mir.functions.items, 0..) |func, i| {
1260 | // // var viz_path_buffer: [256]u8 = undefined;
1261 | // // const path_format =
1262 | // // \\E:\Dev\zig_projects\bytebox\viz\viz_{}.txt
1263 | // // ;
1264 | // // const viz_path = std.fmt.bufPrint(&viz_path_buffer, path_format, .{i}) catch unreachable;
1265 | // // std.debug.print("gen graph for func {}\n", .{i});
1266 | // // try func.dumpVizGraph(viz_path, module_def, std.testing.allocator);
1267 | // // }
1268 | // }
1269 |
1270 | // test "ir2" {
1271 | // const filename =
1272 | // // \\E:\Dev\zig_projects\bytebox\test\wasm\br_table\br_table.0.wasm
1273 | // \\E:\Dev\zig_projects\bytebox\test\reg\add.wasm
1274 | // // \\E:\Dev\third_party\zware\test\fact.wasm
1275 | // // \\E:\Dev\zig_projects\bytebox\test\wasm\i32\i32.0.wasm
1276 | // ;
1277 | // const viz_dir =
1278 | // \\E:\Dev\zig_projects\bytebox\test\reg\
1279 | // ;
1280 | // try runTestWithViz(filename, viz_dir);
1281 | // }
1282 |
--------------------------------------------------------------------------------
/test/mem64/main.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 | const bytebox = @import("bytebox");
3 | const Val = bytebox.Val;
4 |
5 | pub fn main() !void { // host driver: loads zig-out/bin/memtest.wasm, invokes its exported "memtest", fails unless it returns 0
6 | std.debug.print("\nRunning mem64 test...\n", .{});
7 |
8 | var gpa = std.heap.GeneralPurposeAllocator(.{}){}; // NOTE(review): gpa.deinit() is never called, so the leak check is skipped — confirm intended
9 | var allocator: std.mem.Allocator = gpa.allocator();
10 |
11 | const wasm_data: []u8 = try std.fs.cwd().readFileAlloc(allocator, "zig-out/bin/memtest.wasm", 1024 * 512); // 512 KB read cap
12 | defer allocator.free(wasm_data);
13 |
14 | const module_def = try bytebox.createModuleDefinition(allocator, .{});
15 | defer module_def.destroy();
16 | try module_def.decode(wasm_data);
17 |
18 | const module_instance = try bytebox.createModuleInstance(.Stack, module_def, allocator); // stack VM backend
19 | defer module_instance.destroy();
20 | try module_instance.instantiate(.{});
21 |
22 | const handle = try module_instance.getFunctionHandle("memtest");
23 | const input = [4]Val{ .{ .I32 = 27368 }, .{ .I64 = 34255 }, .{ .F32 = 34234.8690 }, .{ .F64 = 989343.2849 } }; // arbitrary values round-tripped by the wasm side
24 | var output = [1]Val{.{ .I32 = 0 }};
25 | try module_instance.invoke(handle, &input, &output, .{});
26 |
27 | if (output[0].I32 != 0) { // nonzero means some check inside the wasm module failed
28 | return error.TestFailed;
29 | }
30 |
31 | std.debug.print("success\n", .{});
32 | }
33 |
--------------------------------------------------------------------------------
/test/mem64/memtest.zig:
--------------------------------------------------------------------------------
1 | const std = @import("std");
2 |
3 | const KB = 1024;
4 | const MB = 1024 * KB;
5 | const GB = 1024 * MB;
6 |
7 | const PAGE_SIZE = 64 * KB;
8 | const PAGES_PER_GB = GB / PAGE_SIZE;
9 |
10 | const GLOBAL_DATA: []const volatile u8 = "YNDKMI*#"; // tests if data segments use index type
11 |
12 | fn assert(cond: bool) !void { // error-returning assert so failures propagate up to memtest's catch
13 | if (!cond) {
14 | return error.Failed;
15 | }
16 | }
17 |
18 | fn alignPtr(mem: [*]volatile u8, alignment: usize) [*]volatile u8 { // rounds the pointer up to the next multiple of alignment
19 | return @ptrFromInt(std.mem.alignForward(usize, @intFromPtr(mem), alignment)); // volatile?
20 | }
21 |
22 | fn alignToSinglePtr(comptime T: type, mem: [*]volatile u8) *volatile T { // properly-aligned single-item pointer of type T into the buffer
23 | const mem_aligned = alignPtr(mem, @alignOf(T));
24 | return @ptrCast(@alignCast(mem_aligned));
25 | }
26 |
27 | export fn memtest(val_i32: i32, val_i64: i64, val_f32: f32, val_f64: f64) i32 { // wasm entry point: 0 on success, 1 if any check in testInternal failed
28 | testInternal(val_i32, val_i64, val_f32, val_f64) catch {
29 | return 1;
30 | };
31 | return 0;
32 | }
33 |
34 | fn testInternal(val_i32: i32, val_i64: i64, val_f32: f32, val_f64: f64) !void { // exercises loads/stores above the 4 GB boundary to cover mem64 opcodes
35 | const grow_value: isize = @wasmMemoryGrow(0, PAGES_PER_GB * 6); // memory.grow
36 | try assert(grow_value != -1);
37 |
38 | // volatile pointers ensure the loads and stores don't get optimized away
39 | const start_page: [*]volatile u8 = @ptrFromInt(@as(usize, @intCast(grow_value))); // NOTE(review): grow_value is the previous page count, not a byte address — works only because all offsets below stay in bounds; confirm
40 |
41 | const mem = start_page + (GB * 4); // force addresses past 4 GB so 64-bit addressing is actually used
42 | const mem_loads: [*]volatile u8 = mem + MB * 2;
43 | const mem_stores: [*]volatile u8 = mem + MB * 1;
44 |
45 | const num_pages: usize = @wasmMemorySize(0);
46 | try assert(num_pages >= PAGES_PER_GB * 6);
47 |
48 | const ptr_load_i32 = alignToSinglePtr(i32, mem_loads + 0);
49 | const ptr_load_i64 = alignToSinglePtr(i64, mem_loads + 64);
50 | const ptr_load_f32 = alignToSinglePtr(f32, mem_loads + 128);
51 | const ptr_load_f64 = alignToSinglePtr(f64, mem_loads + 192);
52 |
53 | ptr_load_i32.* = val_i32; // i32.store
54 | ptr_load_i64.* = val_i64; // i64.store
55 | ptr_load_f32.* = val_f32; // f32.store
56 | ptr_load_f64.* = val_f64; // f64.store
57 |
58 | try assert(ptr_load_i32.* == val_i32);
59 | try assert(ptr_load_i64.* == val_i64);
60 | try assert(ptr_load_f32.* == val_f32);
61 | try assert(ptr_load_f64.* == val_f64);
62 |
63 | const ptr_store_i32 = alignToSinglePtr(i32, mem_stores + 0);
64 | const ptr_store_i64 = alignToSinglePtr(i64, mem_stores + 64);
65 | const ptr_store_f32 = alignToSinglePtr(f32, mem_stores + 128);
66 | const ptr_store_f64 = alignToSinglePtr(f64, mem_stores + 192);
67 |
68 | ptr_store_i32.* = ptr_load_i32.*; // i32.load && i32.store
69 | ptr_store_i64.* = ptr_load_i64.*; // i64.load && i64.store
70 | ptr_store_f32.* = ptr_load_f32.*; // f32.load && f32.store
71 | ptr_store_f64.* = ptr_load_f64.*; // f64.load && f64.store
72 |
73 | try assert(ptr_store_i32.* == ptr_load_i32.*);
74 | try assert(ptr_store_i64.* == ptr_load_i64.*);
75 | try assert(ptr_store_f32.* == ptr_load_f32.*);
76 | try assert(ptr_store_f64.* == ptr_load_f64.*);
77 |
78 | var load32: i32 = 0;
79 | ptr_load_i32.* = 0x7F;
80 | load32 = @as(*volatile i8, @ptrCast(@alignCast(ptr_load_i32))).*; // i32.load8_s
81 | try assert(load32 == 0x7F);
82 | ptr_load_i32.* = 0xFF;
83 | load32 = @as(*volatile u8, @ptrCast(@alignCast(ptr_load_i32))).*; // i32.load8_u
84 | try assert(load32 == 0xFF);
85 | ptr_load_i32.* = 0x7FFF;
86 | load32 = @as(*volatile i16, @ptrCast(@alignCast(ptr_load_i32))).*; // i32.load16_s
87 | try assert(load32 == 0x7FFF);
88 | ptr_load_i32.* = 0xFFFF;
89 | load32 = @as(*volatile u16, @ptrCast(@alignCast(ptr_load_i32))).*; // i32.load16_u
90 | try assert(load32 == 0xFFFF);
91 |
92 | var load64: i64 = 0;
93 | ptr_load_i64.* = 0x7F;
94 | load64 = @as(*volatile i8, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load8_s
95 | try assert(load64 == 0x7F);
96 | ptr_load_i64.* = 0xFF;
97 | load64 = @as(*volatile u8, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load8_u
98 | try assert(load64 == 0xFF);
99 | ptr_load_i64.* = 0x7FFF;
100 | load64 = @as(*volatile i16, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load16_s
101 | try assert(load64 == 0x7FFF);
102 | ptr_load_i64.* = 0xFFFF;
103 | load64 = @as(*volatile u16, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load16_u
104 | try assert(load64 == 0xFFFF);
105 | ptr_load_i64.* = 0x7FFFFFFF;
106 | load64 = @as(*volatile i32, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load32_s
107 | try assert(load64 == 0x7FFFFFFF);
108 | ptr_load_i64.* = 0xFFFFFFFF;
109 | load64 = @as(*volatile u32, @ptrCast(@alignCast(ptr_load_i64))).*; // i64.load32_u
110 | try assert(load64 == 0xFFFFFFFF);
111 |
112 | const memset_dest = (mem + KB)[0..KB];
113 | const memcpy_dest = (mem + KB * 2)[0..KB];
114 | @memset(memset_dest, 0xFF); // memory.fill
115 | @memcpy(memcpy_dest, memset_dest); // memory.copy
116 |
117 | try assert(memset_dest[0] == 0xFF);
118 | try assert(memset_dest[KB - 1] == 0xFF);
119 | try assert(memcpy_dest[0] == 0xFF);
120 | try assert(memcpy_dest[KB - 1] == 0xFF);
121 |
122 | // forces data segment to be generated
123 | @memcpy(memcpy_dest[0..GLOBAL_DATA.len], GLOBAL_DATA);
124 |
125 | try assert(memcpy_dest[0] == 'Y');
126 | try assert(memcpy_dest[1] == 'N');
127 | try assert(memcpy_dest[2] == 'D');
128 | try assert(memcpy_dest[3] == 'K');
129 | try assert(memcpy_dest[4] == 'M');
130 | try assert(memcpy_dest[5] == 'I');
131 | try assert(memcpy_dest[6] == '*');
132 | try assert(memcpy_dest[7] == '#');
133 | }
134 |
--------------------------------------------------------------------------------
/test/wasi/bytebox_adapter.py:
--------------------------------------------------------------------------------
1 | # Based on the wasmtime adapter in wasi-testsuite
2 | import argparse
3 | import subprocess
4 | import sys
5 | import os
6 | import shlex
7 |
8 | current_file_path = os.path.dirname(os.path.realpath(__file__))
9 | bytebox_relative_path = "../../zig-out/bin/bytebox"
10 | if sys.platform == 'Windows':
11 | bytebox_relative_path += ".exe"
12 | BYTEBOX = os.path.join(current_file_path, bytebox_relative_path)
13 |
14 | parser = argparse.ArgumentParser()
15 | parser.add_argument("--version", action="store_true")
16 | parser.add_argument("--test-file", action="store")
17 | parser.add_argument("--arg", action="append", default=[])
18 | parser.add_argument("--env", action="append", default=[])
19 | parser.add_argument("--dir", action="append", default=[])
20 |
21 | args = parser.parse_args()
22 |
23 | if args.version:
24 | subprocess.run([BYTEBOX] + ["--version"])
25 | sys.exit(0)
26 |
27 | TEST_FILE = args.test_file
28 | PROG_ARGS = args.arg
29 | ENV_ARGS = [j for i in args.env for j in ["--env", i]]
30 | DIR_ARGS = [j for i in args.dir for j in ["--dir", i]]
31 |
32 | ALL_ARGS = [BYTEBOX] + [TEST_FILE] + PROG_ARGS + ENV_ARGS + DIR_ARGS
33 |
34 | r = subprocess.run(ALL_ARGS)
35 | sys.exit(r.returncode)
36 |
--------------------------------------------------------------------------------
/test/wasi/run.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 | completedProcess = subprocess.run([
4 | "python3",
5 | "test/wasi/wasi-testsuite/test-runner/wasi_test_runner.py",
6 | "-r",
7 | "test/wasi/bytebox_adapter.py",
8 | "-t",
9 | "./test/wasi/wasi-testsuite/tests/assemblyscript/testsuite/",
10 | "./test/wasi/wasi-testsuite/tests/c/testsuite/",
11 | "./test/wasi/wasi-testsuite/tests/rust/testsuite/"])
12 |
13 | # the wasi tests leave a bunch of untracked files around after a test run
14 | subprocess.run(["git", "clean", "-f"], cwd="test/wasi/wasi-testsuite")
15 |
16 | # propagate the test suite return code if there was an error
17 | if completedProcess.returncode != 0:
18 | exit(completedProcess.returncode)
19 |
--------------------------------------------------------------------------------
/test/wasi/runtests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | python3 wasi-testsuite/test-runner/wasi_test_runner.py -t ./wasi-testsuite/tests/assemblyscript/testsuite/ ./wasi-testsuite/tests/c/testsuite/ -r ./bytebox_adapter.sh # NOTE(review): the tree only contains bytebox_adapter.py — confirm bytebox_adapter.sh exists or is generated
3 |
--------------------------------------------------------------------------------