├── nix ├── bazel │ ├── BUILD.bazel │ ├── nixpkgs.nix │ ├── env.nix │ └── flatten.bzl ├── cc │ ├── BUILD.bazel │ └── cc.nix └── rust │ ├── fenix.nix │ ├── rust_platform.nix │ ├── wasm_bindgen.nix │ └── BUILD.bazel ├── tailwindcss ├── BUILD.bazel └── tailwindcss.bzl ├── third_party ├── nix │ ├── BUILD.bazel │ ├── diesel_cli │ │ ├── BUILD.bazel │ │ ├── bazel.nix │ │ └── default.nix │ ├── jemalloc │ │ ├── BUILD.bazel │ │ ├── bazel.nix │ │ ├── BUILD.jemalloc │ │ └── default.nix │ ├── oci_base │ │ ├── BUILD.bazel │ │ └── default.nix │ ├── cargo-bazel │ │ ├── BUILD.bazel │ │ ├── bazel.nix │ │ └── default.nix │ ├── BUILD.libunwind │ ├── BUILD.tailwindcss │ ├── BUILD.wasm-bindgen-cli │ ├── BUILD.postgresql │ ├── BUILD.sqlite │ ├── leptosfmt │ │ └── default.nix │ ├── BUILD.protobuf │ └── deps.bzl └── rust │ ├── BUILD.bazel │ └── patches │ ├── prost-build │ ├── BUILD.bazel │ └── 0001-Allow-substitution-for-the-message-type-in-type-attr.patch │ ├── rules_rust │ ├── BUILD.bazel │ └── 0001-Revert-Update-runfiles-to-work-with-directory-embedd.patch │ └── diesel-tracing │ ├── BUILD.bazel │ └── fix-pgmetadata.patch ├── .bazelignore ├── BUILD.bazel ├── img ├── ss.png └── testhistory.png ├── blade ├── static │ ├── favicon.ico │ ├── tailwind.css │ ├── success.svg │ ├── bazel.svg │ ├── zoom-reset.svg │ ├── sort.svg │ ├── fail.svg │ ├── skip.svg │ ├── copy.svg │ ├── code.svg │ ├── history.svg │ ├── shard.svg │ ├── BUILD.bazel │ ├── zoom-out.svg │ ├── disconnect.svg │ ├── zoom-in.svg │ ├── spinner.svg │ ├── number.svg │ └── test.svg ├── db │ ├── postgres │ │ ├── migrations │ │ │ ├── 2024-02-02-070857_options │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-06-30-075408_outputindex │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-07-26-000000_add_profile_uri │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-07-19-000001_add_liveness │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-11-19-000100_drop_tests_name_trgm │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 
2023-11-29-101344_init │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-11-19-000000_unique_test_names │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-09-16-000100_add_trgm │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-09-16-000000_add_indexes │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-06-20-211740_newoutput │ │ │ │ ├── up.sql │ │ │ │ └── down.sql │ │ │ └── 2025-09-16-000200_fix_large_keyval_indexes │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ ├── diesel.toml │ │ └── schema.rs │ ├── sqlite │ │ ├── migrations │ │ │ ├── 2024-02-01-092732_options │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-06-30-081840_outputindex │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-07-26-000000_add_profile_uri │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-07-19-000001_add_liveness │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2023-11-29-101344_init │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ ├── 2025-11-19-000000_unique_test_names │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ └── 2025-06-21-082528_newoutput │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ ├── diesel.toml │ │ └── schema.rs │ ├── time.rs │ ├── lib.rs │ ├── BUILD.bazel │ ├── envscrub.rs │ ├── manager.rs │ └── exec.rs ├── leptos.toml ├── components │ ├── dashboard │ │ ├── mod.rs │ │ ├── pass_fail_chart.rs │ │ ├── graphs.rs │ │ ├── pass_fail_scatterplot.rs │ │ └── duration_chart.rs │ ├── navigation.rs │ ├── charts │ │ └── mod.rs │ ├── lib.rs │ ├── card.rs │ ├── list.rs │ ├── measuretime.rs │ ├── clipboard.rs │ ├── tooltip.rs │ ├── searchbar.rs │ ├── BUILD.bazel │ ├── accordion.rs │ ├── statusicon.rs │ └── shellout.rs ├── routes │ ├── lib.rs │ ├── empty.rs │ ├── artifact.rs │ ├── app.rs │ ├── BUILD.bazel │ ├── summary.rs │ └── profile.rs ├── bytestream │ ├── proto │ │ └── BUILD.bazel │ └── BUILD.bazel ├── tailwindmerge │ ├── BUILD.bazel │ └── lib.rs ├── memdump │ ├── BUILD.bazel │ └── lib.rs ├── metrics │ ├── BUILD.bazel │ └── lib.rs ├── bep │ ├── protos │ │ ├── 
descriptors.bzl │ │ ├── package_load_metrics.proto │ │ ├── option_filters.proto │ │ ├── action_cache.proto │ │ └── BUILD.bazel │ ├── proto_registry │ │ ├── BUILD.bazel │ │ └── lib.rs │ ├── buildtoollogs.rs │ ├── print_event.rs │ ├── BUILD.bazel │ ├── options.rs │ └── buildinfo.rs ├── darkmode │ ├── BUILD.bazel │ └── lib.rs ├── trace_event_parser │ └── BUILD.bazel ├── prototime │ └── BUILD.bazel ├── shared │ ├── BUILD.bazel │ └── lib.rs ├── state │ └── BUILD.bazel └── lib.rs ├── .bazel-lib └── bazelrc │ ├── osx.bazelrc │ ├── release.bazelrc │ ├── clippy.bazelrc │ ├── nix-toolchains.bazelrc │ ├── convenience.bazelrc │ ├── correctness.bazelrc │ └── performance.bazelrc ├── .gitignore ├── .envrc ├── tailwind.config.js ├── postgres └── harness │ ├── BUILD.bazel │ └── lib.rs ├── .bazelrc ├── tools └── streamstress │ └── BUILD.bazel ├── LICENSE ├── .rustfmt.toml ├── prost └── BUILD.bazel ├── .github └── workflows │ ├── ci.yaml │ └── release.yaml ├── MODULE.bazel ├── README.md ├── flake.nix └── WORKSPACE.bazel /nix/bazel/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tailwindcss/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/nix/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/rust/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/nix/diesel_cli/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/third_party/nix/jemalloc/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/nix/oci_base/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/nix/cargo-bazel/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.bazelignore: -------------------------------------------------------------------------------- 1 | .direnv/ 2 | .devenv 3 | .git/ 4 | -------------------------------------------------------------------------------- /third_party/rust/patches/prost-build/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/rust/patches/rules_rust/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /BUILD.bazel: -------------------------------------------------------------------------------- 1 | exports_files(["tailwind.config.js"]) 2 | -------------------------------------------------------------------------------- /third_party/rust/patches/diesel-tracing/BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /img/ss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DolceTriade/blade/HEAD/img/ss.png -------------------------------------------------------------------------------- /img/testhistory.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/DolceTriade/blade/HEAD/img/testhistory.png -------------------------------------------------------------------------------- /blade/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DolceTriade/blade/HEAD/blade/static/favicon.ico -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/osx.bazelrc: -------------------------------------------------------------------------------- 1 | build:macos --build_tag_filters=-noosx 2 | test:macos --test_tag_filters=-noosx -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2024-02-02-070857_options/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX Options_Inv_ID; 2 | DROP TABLE Options; 3 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-06-30-075408_outputindex/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX IF EXISTS invocationoutput_inv_id; 2 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2024-02-01-092732_options/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX Options_Inv_ID; 2 | DROP TABLE Options; 3 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-06-30-081840_outputindex/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX IF EXISTS invocationoutput_inv_id; 2 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | .direnv 2 | .devenv 3 | bazel-* 4 | .git 5 | .vscode 6 | .bazelenvrc 7 | rust-project.json 8 | user.bazelrc 9 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-07-26-000000_add_profile_uri/down.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE Invocations DROP COLUMN profile_uri; 2 | -------------------------------------------------------------------------------- /third_party/nix/jemalloc/bazel.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs = import {}; 3 | in 4 | pkgs.callPackage ./default.nix {} 5 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-07-26-000000_add_profile_uri/down.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE invocations DROP COLUMN profile_uri; 2 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-07-26-000000_add_profile_uri/up.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE invocations ADD COLUMN profile_uri TEXT; 2 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-07-26-000000_add_profile_uri/up.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE Invocations ADD COLUMN profile_uri TEXT; 2 | -------------------------------------------------------------------------------- /blade/static/tailwind.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @custom-variant dark (&:where(.dark, .dark *)); 
-------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-06-30-081840_outputindex/up.sql: -------------------------------------------------------------------------------- 1 | CREATE INDEX IF NOT EXISTS invocationoutput_inv_id ON InvocationOutput (invocation_id); 2 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-07-19-000001_add_liveness/down.sql: -------------------------------------------------------------------------------- 1 | -- Rollback last_heartbeat field 2 | ALTER TABLE invocations DROP COLUMN last_heartbeat; 3 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-07-19-000001_add_liveness/down.sql: -------------------------------------------------------------------------------- 1 | -- Rollback last_heartbeat field 2 | ALTER TABLE Invocations DROP COLUMN last_heartbeat; 3 | -------------------------------------------------------------------------------- /blade/leptos.toml: -------------------------------------------------------------------------------- 1 | [package.metadata.leptos] 2 | output-name = "blade_wasm" 3 | site-root = "./blade" 4 | site-addr = "0.0.0.0:3000" 5 | site-pkg-dir = "assets" 6 | -------------------------------------------------------------------------------- /blade/components/dashboard/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod duration_chart; 2 | pub mod filters; 3 | pub mod graphs; 4 | pub mod pass_fail_chart; 5 | pub mod test_history_table; 6 | pub mod test_search; 7 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-07-19-000001_add_liveness/up.sql: -------------------------------------------------------------------------------- 1 | -- Add last_heartbeat field for tracking active streams 2 | ALTER TABLE Invocations ADD COLUMN 
last_heartbeat TEXT; 3 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-07-19-000001_add_liveness/up.sql: -------------------------------------------------------------------------------- 1 | -- Add last_heartbeat field for tracking active streams 2 | ALTER TABLE invocations ADD COLUMN last_heartbeat TIMESTAMP; 3 | -------------------------------------------------------------------------------- /blade/components/navigation.rs: -------------------------------------------------------------------------------- 1 | use leptos_dom::helpers::window; 2 | 3 | /// Opens a URL in a new browser tab 4 | pub fn open_in_new_tab(url: &str) { let _ = window().open_with_url_and_target(url, "_blank"); } 5 | -------------------------------------------------------------------------------- /blade/db/time.rs: -------------------------------------------------------------------------------- 1 | use time::OffsetDateTime; 2 | 3 | pub fn to_systemtime(t: &OffsetDateTime) -> anyhow::Result { 4 | prototime::timestamp::from_unix_sec(t.unix_timestamp() as u64) 5 | } 6 | -------------------------------------------------------------------------------- /blade/routes/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod app; 2 | pub mod artifact; 3 | pub mod dashboard; 4 | pub mod details; 5 | pub mod empty; 6 | pub mod invocation; 7 | pub mod profile; 8 | pub mod summary; 9 | pub mod test; 10 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-11-19-000100_drop_tests_name_trgm/down.sql: -------------------------------------------------------------------------------- 1 | -- Rollback: Recreate trigram index on tests.name 2 | CREATE INDEX IF NOT EXISTS tests_name_trgm_idx ON tests USING GIN (name gin_trgm_ops); 3 | -------------------------------------------------------------------------------- 
/third_party/nix/BUILD.libunwind: -------------------------------------------------------------------------------- 1 | filegroup( 2 | name = "pkgconfig", 3 | # HACK: Use the pkgconfig dir so we can pass it to rust. 4 | srcs = ["lib/pkgconfig"], 5 | visibility = ["//visibility:public"], 6 | ) 7 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2023-11-29-101344_init/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX Targets_Inv_ID; 2 | DROP INDEX Tests_Inv_ID; 3 | DROP TABLE TestRuns; 4 | DROP TABLE TestArtifacts; 5 | DROP TABLE Tests; 6 | DROP TABLE Targets; 7 | DROP TABLE Invocations; -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2023-11-29-101344_init/down.sql: -------------------------------------------------------------------------------- 1 | DROP INDEX Targets_Inv_ID; 2 | DROP INDEX Tests_Inv_ID; 3 | DROP TABLE TestRuns; 4 | DROP TABLE TestArtifacts; 5 | DROP TABLE Tests; 6 | DROP TABLE Targets; 7 | DROP TABLE Invocations; -------------------------------------------------------------------------------- /third_party/nix/cargo-bazel/bazel.nix: -------------------------------------------------------------------------------- 1 | let 2 | platform = import ../../../nix/rust/rust_platform.nix; 3 | pkgs = platform.pkgs; 4 | in 5 | with platform; 6 | pkgs.callPackage ./default.nix { 7 | inherit rustPlatform; 8 | } 9 | -------------------------------------------------------------------------------- /third_party/nix/BUILD.tailwindcss: -------------------------------------------------------------------------------- 1 | genrule( 2 | name = "tailwindcss-cli", 3 | srcs = ["bin/tailwindcss"], 4 | outs = ["tailwindcss"], 5 | cmd = "cp $< $@", 6 | executable = True, 7 | visibility = ["//visibility:public"], 8 | ) 9 | -------------------------------------------------------------------------------- 
/blade/components/charts/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod ganttchart; 2 | pub mod linechart; 3 | pub mod piechart; 4 | 5 | pub(super) fn get_mouse_position_from_event(event: &web_sys::MouseEvent) -> (f64, f64) { 6 | (event.client_x() as f64, event.client_y() as f64) 7 | } 8 | -------------------------------------------------------------------------------- /nix/cc/BUILD.bazel: -------------------------------------------------------------------------------- 1 | toolchain( 2 | name = "nix_cc_toolchain", 3 | target_compatible_with = ["@platforms//cpu:x86_64"], 4 | toolchain = "@nixpkgs_config_cc//:cc-compiler-k8", 5 | toolchain_type = "@bazel_tools//tools/cpp:toolchain_type", 6 | ) 7 | -------------------------------------------------------------------------------- /third_party/nix/BUILD.wasm-bindgen-cli: -------------------------------------------------------------------------------- 1 | genrule( 2 | name = "wasm-bindgen-cli", 3 | srcs = ["bin/wasm-bindgen"], 4 | outs = ["wasm-bindgen"], 5 | cmd = "cp $< $@", 6 | executable = True, 7 | visibility = ["//visibility:public"], 8 | ) 9 | -------------------------------------------------------------------------------- /blade/bytestream/proto/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust_prost//:defs.bzl", "rust_prost_library") 2 | 3 | rust_prost_library( 4 | name = "bytestream_proto", 5 | proto = "@googleapis//google/bytestream:bytestream_proto", 6 | visibility = ["//visibility:public"], 7 | ) 8 | -------------------------------------------------------------------------------- /third_party/nix/diesel_cli/bazel.nix: -------------------------------------------------------------------------------- 1 | let 2 | platform = import ../../../nix/rust/rust_platform.nix; 3 | pkgs = platform.pkgs; 4 | in 5 | with platform; 6 | pkgs.callPackage ./default.nix { 7 | inherit rustPlatform; 8 | 
mysqlSupport = false; 9 | } 10 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-11-19-000100_drop_tests_name_trgm/up.sql: -------------------------------------------------------------------------------- 1 | -- Drop trigram index on tests.name since we now search on unique_test_names table 2 | -- Keep the regular btree index (tests_name_idx) for joins and equality filters 3 | DROP INDEX IF EXISTS tests_name_trgm_idx; 4 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-11-19-000000_unique_test_names/down.sql: -------------------------------------------------------------------------------- 1 | -- Rollback: Drop trigger and function 2 | DROP TRIGGER IF EXISTS maintain_unique_test_names_trigger ON tests; 3 | DROP FUNCTION IF EXISTS maintain_unique_test_names(); 4 | 5 | -- Drop table 6 | DROP TABLE IF EXISTS unique_test_names; 7 | -------------------------------------------------------------------------------- /blade/db/sqlite/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see https://diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "schema.rs" 6 | custom_type_derives = ["diesel::query_builder::QueryId"] 7 | 8 | [migrations_directory] 9 | dir = "migrations" 10 | -------------------------------------------------------------------------------- /blade/db/postgres/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see https://diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "schema.rs" 6 | custom_type_derives = ["diesel::query_builder::QueryId"] 7 | 8 | [migrations_directory] 9 | dir = "migrations" 10 | -------------------------------------------------------------------------------- 
/third_party/nix/BUILD.postgresql: -------------------------------------------------------------------------------- 1 | cc_library( 2 | name = "postgresql", 3 | srcs = select({ 4 | "@platforms//os:osx": ["lib/libpq.dylib"], 5 | "//conditions:default": glob(["lib/libpq.*"]), 6 | }), 7 | linkstatic = False, 8 | visibility = ["//visibility:public"], 9 | ) 10 | -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/release.bazelrc: -------------------------------------------------------------------------------- 1 | build:release --@rules_rust//:extra_exec_rustc_flags=-Copt-level=3 2 | build:release --@rules_rust//:extra_exec_rustc_flags=-Cembed-bitcode=yes 3 | build:release --@rules_rust//:extra_exec_rustc_flags=-Clto=thin 4 | build:release --@rules_rust//:extra_exec_rustc_flags=-Zdylib-lto 5 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000100_add_trgm/down.sql: -------------------------------------------------------------------------------- 1 | -- Drop trigram indexes then extension (extension drop will fail if others depend on it) 2 | DROP INDEX IF EXISTS options_keyval_trgm_idx; 3 | DROP INDEX IF EXISTS invocationoutput_line_trgm_idx; 4 | DROP INDEX IF EXISTS tests_name_trgm_idx; 5 | DROP EXTENSION IF EXISTS pg_trgm; 6 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-11-19-000000_unique_test_names/down.sql: -------------------------------------------------------------------------------- 1 | -- Rollback: Drop triggers and table 2 | DROP TRIGGER IF EXISTS maintain_unique_test_names_insert; 3 | DROP TRIGGER IF EXISTS maintain_unique_test_names_update; 4 | DROP TRIGGER IF EXISTS maintain_unique_test_names_delete; 5 | 6 | -- Drop table 7 | DROP TABLE IF EXISTS unique_test_names; 8 | -------------------------------------------------------------------------------- 
/.bazel-lib/bazelrc/clippy.bazelrc: -------------------------------------------------------------------------------- 1 | build --aspects=@rules_rust//rust:defs.bzl%rust_clippy_aspect 2 | build --output_groups=+clippy_checks 3 | build --output_groups=+default 4 | build --show_result=0 5 | # Nix passes this in for us? 6 | build --@rules_rust//rust/settings:toolchain_generated_sysroot=false 7 | build --@rules_rust//rust/settings:pipelined_compilation=true 8 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | # Description: Direnv configuration for Nix 2 | # See instructions in flake.nix 3 | 4 | if ! has nix_direnv_version || ! nix_direnv_version 2.2.1; then 5 | source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.2.1/direnvrc" "sha256-zelF0vLbEl5uaqrfIzbgNzJWGmLzCmYAkInj/LNxvKs=" 6 | fi 7 | watch_file ./nix/bazel/env.nix 8 | use flake . --impure -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2024-02-02-070857_options/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE Options ( 2 | id TEXT NOT NULL PRIMARY KEY, 3 | invocation_id TEXT NOT NULL, 4 | kind TEXT NOT NULL, 5 | keyval TEXT NOT NULL, 6 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 7 | ON DELETE CASCADE 8 | ); 9 | CREATE INDEX Options_Inv_ID ON Options ( invocation_id ); -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2024-02-01-092732_options/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE Options ( 2 | id TEXT NOT NULL PRIMARY KEY, 3 | invocation_id TEXT NOT NULL, 4 | kind TEXT NOT NULL, 5 | keyval TEXT NOT NULL, 6 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 7 | ON DELETE CASCADE 8 | ); 9 | CREATE INDEX 
Options_Inv_ID ON Options ( invocation_id ); -------------------------------------------------------------------------------- /blade/tailwindmerge/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "tailwindmerge", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | visibility = ["//visibility:public"], 9 | ) 10 | 11 | rust_test( 12 | name = "tailwindmerge_test", 13 | size = "small", 14 | crate = ":tailwindmerge", 15 | ) 16 | -------------------------------------------------------------------------------- /third_party/nix/BUILD.sqlite: -------------------------------------------------------------------------------- 1 | cc_library( 2 | name = "sqlite", 3 | srcs = select({ 4 | "@platforms//os:osx": ["lib/libsqlite3.dylib"], 5 | "//conditions:default": glob( 6 | ["lib/libsqlite3.*"], 7 | exclude = ["**/*.la"], 8 | ), 9 | }), 10 | linkstatic = False, 11 | visibility = ["//visibility:public"], 12 | ) 13 | -------------------------------------------------------------------------------- /nix/bazel/nixpkgs.nix: -------------------------------------------------------------------------------- 1 | let 2 | lock = builtins.fromJSON (builtins.readFile ../../flake.lock); 3 | src = lock.nodes."${lock.nodes.root.inputs.nixpkgs}".locked; 4 | nixpkgs = assert src.type == "github"; 5 | fetchTarball { 6 | url = "https://github.com/${src.owner}/${src.repo}/archive/${src.rev}.tar.gz"; 7 | sha256 = src.narHash; 8 | }; 9 | in 10 | import nixpkgs -------------------------------------------------------------------------------- /nix/rust/fenix.nix: -------------------------------------------------------------------------------- 1 | let 2 | lock = builtins.fromJSON (builtins.readFile ../../flake.lock); 3 | fenixSrc = lock.nodes.fenix.locked; 4 | fenix = assert fenixSrc.type == "github"; 5 | fetchTarball { 6 | url = 
"https://github.com/${fenixSrc.owner}/${fenixSrc.repo}/archive/${fenixSrc.rev}.tar.gz"; 7 | sha256 = fenixSrc.narHash; 8 | }; 9 | in 10 | import fenix 11 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000000_add_indexes/down.sql: -------------------------------------------------------------------------------- 1 | -- Drop performance indexes 2 | DROP INDEX IF EXISTS invocationoutput_inv_id_id_idx; 3 | DROP INDEX IF EXISTS options_build_metadata_keyval_idx; 4 | DROP INDEX IF EXISTS options_keyval_idx; 5 | DROP INDEX IF EXISTS testartifacts_inv_id_idx; 6 | DROP INDEX IF EXISTS testruns_test_id_idx; 7 | DROP INDEX IF EXISTS tests_name_idx; 8 | DROP INDEX IF EXISTS invocations_start_idx; 9 | -------------------------------------------------------------------------------- /blade/components/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod accordion; 2 | pub mod card; 3 | pub mod charts; 4 | pub mod clipboard; 5 | pub mod dashboard; 6 | pub mod list; 7 | pub mod measuretime; 8 | pub mod nav; 9 | pub mod navigation; 10 | pub mod searchbar; 11 | pub mod shellout; 12 | pub mod statusicon; 13 | pub mod summaryheader; 14 | pub mod targetlist; 15 | pub mod testartifactlist; 16 | pub mod testinsights; 17 | pub mod testresults; 18 | pub mod testrunlist; 19 | pub mod testsummary; 20 | pub mod tooltip; 21 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-06-20-211740_newoutput/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | 3 | CREATE TABLE InvocationOutput ( 4 | id SERIAL PRIMARY KEY, 5 | invocation_id TEXT NOT NULL REFERENCES Invocations(id) ON DELETE CASCADE, 6 | line TEXT NOT NULL 7 | ); 8 | 9 | INSERT INTO InvocationOutput (invocation_id, line) 10 | SELECT 11 | i.id, 12 | UNNEST(STRING_TO_ARRAY(i.output, E'\n')) AS 
line 13 | FROM 14 | Invocations i; 15 | 16 | ALTER TABLE Invocations 17 | DROP COLUMN output; 18 | -------------------------------------------------------------------------------- /tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | darkMode: "class", 4 | content: { 5 | files: ["./**/*.rs"], 6 | }, 7 | theme: { 8 | extend: { 9 | maxWidth: { 10 | "1/4": "25%", 11 | "1/2": "50%", 12 | "3/4": "75%", 13 | }, 14 | }, 15 | }, 16 | plugins: [], 17 | variants: { 18 | "overflow": ["hover", "group-hover"], 19 | "visibility": ["hover", "group-hover"], 20 | "invert": ["dark"], 21 | } 22 | } -------------------------------------------------------------------------------- /blade/static/success.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 8 | -------------------------------------------------------------------------------- /third_party/nix/oci_base/default.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs = import {}; 3 | packages = with pkgs; [ 4 | libgcc.lib 5 | libunwind 6 | openssl 7 | postgresql.lib 8 | sqlite.out 9 | ]; 10 | closure = with pkgs; builtins.toString (lib.strings.splitString "\n" (builtins.readFile "${closureInfo {rootPaths = packages;}}/store-paths")); 11 | in 12 | pkgs.buildEnv { 13 | name = "closure"; 14 | paths = []; 15 | buildInputs = packages; 16 | postBuild = "tar -zcf $out/closure.tar.gz ${closure}"; 17 | } 18 | -------------------------------------------------------------------------------- /blade/memdump/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "memdump", 5 | srcs = glob(["**/*.rs"]), 6 | visibility = ["//visibility:public"], 7 | deps = [ 8 | "@crate//:anyhow", 9 | 
"@crate//:tempdir", 10 | "@crate//:tikv-jemalloc-ctl", 11 | "@crate//:tikv-jemallocator", 12 | "@crate//:tokio", 13 | ], 14 | ) 15 | 16 | rust_test( 17 | name = "memdump_test", 18 | size = "small", 19 | crate = ":memdump", 20 | ) 21 | -------------------------------------------------------------------------------- /blade/metrics/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | load("@rules_rust_prost//:defs.bzl", "rust_prost_library") 3 | 4 | rust_library( 5 | name = "metrics", 6 | srcs = [ 7 | "lib.rs", 8 | ], 9 | visibility = ["//visibility:public"], 10 | deps = [ 11 | "@crate//:anyhow", 12 | "@crate//:lazy_static", 13 | "@crate//:prometheus-client", 14 | ], 15 | ) 16 | 17 | rust_test( 18 | name = "metrics_test", 19 | size = "small", 20 | crate = ":metrics", 21 | ) 22 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-06-30-075408_outputindex/up.sql: -------------------------------------------------------------------------------- 1 | CREATE INDEX IF NOT EXISTS invocationoutput_inv_id ON invocationoutput (invocation_id); 2 | 3 | -- For SQLite: 4 | -- CREATE INDEX IF NOT EXISTS idx_invocationoutput_invocation_id ON invocationoutput (invocation_id); 5 | 6 | -- For MySQL: 7 | -- CREATE INDEX idx_invocationoutput_invocation_id ON invocationoutput (invocation_id); 8 | -- MySQL's CREATE INDEX does not have IF NOT EXISTS; it will error if the index exists. 9 | -- You typically rely on the migration system to ensure it's only run once. 
10 | -------------------------------------------------------------------------------- /blade/bep/protos/descriptors.bzl: -------------------------------------------------------------------------------- 1 | """Rule to get all transitive descriptors for a proto""" 2 | 3 | def _impl(ctx): 4 | pi = ctx.attr.proto[ProtoInfo] 5 | return [DefaultInfo( 6 | files = depset( 7 | [pi.direct_descriptor_set], 8 | transitive = [pi.transitive_descriptor_sets], 9 | ), 10 | )] 11 | 12 | transitive_proto_descriptors = rule( 13 | implementation = _impl, 14 | attrs = { 15 | "proto": attr.label( 16 | mandatory = True, 17 | providers = [ProtoInfo], 18 | ), 19 | }, 20 | ) 21 | -------------------------------------------------------------------------------- /blade/components/card.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use tailwindmerge::tailwind_merge; 3 | 4 | #[allow(non_snake_case)] 5 | #[component] 6 | pub fn Card( 7 | children: Children, 8 | #[prop(into, default = "".into())] class: Signal, 9 | ) -> impl IntoView { 10 | view! { 11 |
{children()}
15 | } 16 | } 17 | -------------------------------------------------------------------------------- /blade/components/list.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | 3 | #[allow(non_snake_case)] 4 | #[component] 5 | pub fn ListItem(children: Children, hide: Signal) -> impl IntoView { 6 | view! { 7 |
  • 8 | {children()} 9 |
  • 10 | } 11 | } 12 | 13 | #[allow(non_snake_case)] 14 | #[component] 15 | pub fn List(children: Children) -> impl IntoView { 16 | view! { 17 |
      18 | {children()} 19 |
    20 | } 21 | } 22 | -------------------------------------------------------------------------------- /postgres/harness/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "harness", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | data = ["@postgresql-bin//:bin"], 9 | visibility = ["//visibility:public"], 10 | deps = [ 11 | "@crate//:anyhow", 12 | "@crate__rustix-0.38.8//:rustix", 13 | "@rules_rust//tools/runfiles", 14 | ], 15 | ) 16 | 17 | rust_test( 18 | name = "harness_test", 19 | size = "small", 20 | crate = ":harness", 21 | deps = ["@crate//:tempdir"], 22 | ) 23 | -------------------------------------------------------------------------------- /blade/static/bazel.svg: -------------------------------------------------------------------------------- 1 | 2 | file_type_bazel -------------------------------------------------------------------------------- /blade/static/zoom-reset.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /blade/db/lib.rs: -------------------------------------------------------------------------------- 1 | use anyhow::anyhow; 2 | 3 | mod envscrub; 4 | mod exec; 5 | mod manager; 6 | mod postgres; 7 | mod sqlite; 8 | mod time; 9 | 10 | pub use exec::{run, run_group, transaction}; 11 | 12 | pub fn new(uri: &str) -> anyhow::Result> { 13 | if uri.starts_with("postgres://") { 14 | return Ok(std::sync::Arc::from(manager::PostgresManager::new(uri)?)); 15 | } 16 | if uri.starts_with("sqlite://") { 17 | return Ok(std::sync::Arc::from(manager::SqliteManager::new(uri)?)); 18 | } 19 | Err(anyhow!("unknown database implementation: {}", uri)) 20 | } 21 | -------------------------------------------------------------------------------- /nix/rust/rust_platform.nix: 
-------------------------------------------------------------------------------- 1 | let 2 | og = import {}; 3 | fenix = import {pkgs = og;}; 4 | pkgs = import { 5 | system = builtins.currentSystem; 6 | overlays = [ 7 | (self: super: { 8 | inherit fenix; 9 | }) 10 | ]; 11 | }; 12 | rust = with pkgs.fenix; 13 | with latest; 14 | combine [ 15 | cargo 16 | clippy 17 | rust-src 18 | rustc 19 | rustfmt 20 | rust-analyzer 21 | ]; 22 | rustPlatform = pkgs.makeRustPlatform { 23 | cargo = rust; 24 | rustc = rust; 25 | }; 26 | in { 27 | inherit pkgs rustPlatform; 28 | } 29 | -------------------------------------------------------------------------------- /blade/static/sort.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000200_fix_large_keyval_indexes/down.sql: -------------------------------------------------------------------------------- 1 | -- Recreate original btree indexes (may fail again if keyval entries exceed page limits) 2 | -- Provided for symmetry; consider leaving dropped in production if values remain large. 
3 | DROP INDEX IF EXISTS options_keyval_prefix_idx; 4 | DROP INDEX IF EXISTS options_build_metadata_keyval_hash_idx; 5 | DROP INDEX IF EXISTS options_keyval_hash_idx; 6 | 7 | -- (Re-)create original indexes 8 | CREATE INDEX IF NOT EXISTS options_keyval_idx ON options (keyval); 9 | CREATE INDEX IF NOT EXISTS options_build_metadata_keyval_idx ON options (keyval) WHERE kind = 'Build Metadata'; 10 | -------------------------------------------------------------------------------- /blade/static/fail.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 8 | 10 | -------------------------------------------------------------------------------- /blade/components/measuretime.rs: -------------------------------------------------------------------------------- 1 | #[allow(unused_macros)] 2 | #[macro_export] 3 | macro_rules! measure_time { 4 | ($label:expr, $($code:tt)*) => {{ 5 | let start = web_sys::window() 6 | .and_then(|w| w.performance()) 7 | .map(|p| p.now()) 8 | .unwrap_or(0.0); 9 | 10 | let result = { $($code)* }; 11 | 12 | let end = web_sys::window() 13 | .and_then(|w| w.performance()) 14 | .map(|p| p.now()) 15 | .unwrap_or(0.0); 16 | 17 | let duration = end - start; 18 | web_sys::console::log_1(&format!("{}: {:.3}ms", $label, duration).into()); 19 | 20 | result 21 | }}; 22 | } 23 | -------------------------------------------------------------------------------- /nix/bazel/env.nix: -------------------------------------------------------------------------------- 1 | {pkgs ? 
import {}}: let 2 | bazelEnv = with pkgs; 3 | [ 4 | bash 5 | coreutils 6 | diffutils 7 | file 8 | findutils 9 | gawk 10 | gnugrep 11 | gnumake 12 | gnused 13 | gnutar 14 | gzip 15 | nix 16 | python3 17 | unzip 18 | which 19 | zip 20 | bintools 21 | (import ../cc/cc.nix {inherit pkgs;}) 22 | ] 23 | ++ ( 24 | if pkgs.stdenv.isDarwin 25 | then [pkgs.apple-sdk pkgs.xcbuild] 26 | else [] 27 | ); 28 | in 29 | pkgs.buildEnv { 30 | name = "bazel-env"; 31 | paths = bazelEnv; 32 | pathsToLink = ["/bin"]; 33 | } 34 | -------------------------------------------------------------------------------- /blade/static/skip.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /blade/darkmode/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library") 2 | 3 | rust_library( 4 | name = "darkmode", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | crate_features = select({ 9 | "@platforms//cpu:wasm32": ["hydrate"], 10 | "//conditions:default": ["ssr"], 11 | }), 12 | visibility = ["//visibility:public"], 13 | deps = select({ 14 | "@platforms//cpu:wasm32": [ 15 | "@wasm_crate//:cfg-if", 16 | "@wasm_crate//:leptos", 17 | "@wasm_crate//:web-sys", 18 | ], 19 | "//conditions:default": [ 20 | "@crate//:cfg-if", 21 | "@crate//:leptos", 22 | "@crate//:web-sys", 23 | ], 24 | }), 25 | ) 26 | -------------------------------------------------------------------------------- /blade/trace_event_parser/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | rust_library( 6 | name = "trace_event_parser", 7 | srcs = ["lib.rs"], 8 | edition = "2021", 9 | deps = select({ 10 | "@platforms//cpu:wasm32": [ 11 | 
"@wasm_crate//:serde", 12 | "@wasm_crate//:serde_json", 13 | ], 14 | "//conditions:default": [ 15 | "@crate//:serde", 16 | "@crate//:serde_json", 17 | ], 18 | }), 19 | ) 20 | 21 | rust_test( 22 | name = "trace_event_parser_test", 23 | size = "small", 24 | compile_data = glob(["testdata/**"]), 25 | crate = ":trace_event_parser", 26 | ) 27 | -------------------------------------------------------------------------------- /blade/bytestream/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "bytestream", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | visibility = ["//visibility:public"], 9 | deps = [ 10 | "//blade/bytestream/proto:bytestream_proto", 11 | "@crate//:anyhow", 12 | "@crate//:async-stream", 13 | "@crate//:futures", 14 | "@crate//:log", 15 | "@crate//:prost", 16 | "@crate//:prost-types", 17 | "@crate//:tokio", 18 | "@crate//:tokio-stream", 19 | "@crate//:tonic", 20 | "@crate//:url", 21 | ], 22 | ) 23 | 24 | rust_test( 25 | name = "bytestream_test", 26 | size = "small", 27 | crate = ":bytestream", 28 | ) 29 | -------------------------------------------------------------------------------- /.bazelrc: -------------------------------------------------------------------------------- 1 | # Import bazelrc presets 2 | import %workspace%/.bazel-lib/bazelrc/convenience.bazelrc 3 | import %workspace%/.bazel-lib/bazelrc/correctness.bazelrc 4 | import %workspace%/.bazel-lib/bazelrc/performance.bazelrc 5 | import %workspace%/.bazel-lib/bazelrc/nix-toolchains.bazelrc 6 | import %workspace%/.bazel-lib/bazelrc/clippy.bazelrc 7 | import %workspace%/.bazel-lib/bazelrc/osx.bazelrc 8 | import %workspace%/.bazel-lib/bazelrc/release.bazelrc 9 | 10 | 11 | common --incompatible_enable_proto_toolchain_resolution 12 | common --enable_workspace 13 | # Enable tracking of source directories to silence warnings about libunwind. 
14 | startup --host_jvm_args=-DBAZEL_TRACK_SOURCE_DIRECTORIES=1 15 | 16 | try-import %workspace%/.bazelenvrc 17 | try-import %workspace%/user.bazelrc 18 | -------------------------------------------------------------------------------- /blade/static/copy.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 8 | -------------------------------------------------------------------------------- /tools/streamstress/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_binary") 2 | 3 | rust_binary( 4 | name = "streamstress", 5 | srcs = [ 6 | "main.rs", 7 | ], 8 | visibility = ["//visibility:public"], 9 | deps = [ 10 | "//blade/bep/protos:bep_proto", 11 | "//blade/bep/protos:build_event_stream_rust_proto", 12 | "@crate//:anyhow", 13 | "@crate//:clap", 14 | "@crate//:futures", 15 | "@crate//:prost", 16 | "@crate//:prost-reflect", 17 | "@crate//:prost-types", 18 | "@crate//:tokio", 19 | "@crate//:tokio-stream", 20 | "@crate//:tonic", 21 | "@crate//:tracing", 22 | "@crate//:tracing-subscriber", 23 | "@crate//:uuid", 24 | ], 25 | ) 26 | -------------------------------------------------------------------------------- /blade/static/code.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /blade/static/history.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /blade/components/clipboard.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use tailwindmerge::tailwind_merge; 3 | 4 | #[component] 5 | pub fn CopyToClipboard( 6 | #[prop(into)] text: Signal, 7 | #[prop(into, default = "".into())] class: Signal, 8 | ) -> impl 
IntoView { 9 | view! { 10 | 11 | 21 | 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /blade/static/shard.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /blade/static/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust_wasm_bindgen//rules_js:defs.bzl", "js_rust_wasm_bindgen") 2 | load("//nix/bazel:flatten.bzl", "flatten") 3 | load("//tailwindcss:tailwindcss.bzl", "tailwindcss") 4 | 5 | tailwindcss( 6 | name = "style", 7 | src = "tailwind.css", 8 | target = "//blade:blade.wasm", 9 | ) 10 | 11 | filegroup( 12 | name = "static_files", 13 | srcs = glob( 14 | ["*"], 15 | exclude = ["BUILD.bazel"], 16 | ) + [":style"], 17 | visibility = ["//visibility:public"], 18 | ) 19 | 20 | js_rust_wasm_bindgen( 21 | name = "blade_wasm", 22 | target = "web", 23 | wasm_file = "//blade:blade.wasm", 24 | ) 25 | 26 | flatten( 27 | name = "static", 28 | srcs = [ 29 | ":blade_wasm", 30 | ":static_files", 31 | ], 32 | visibility = ["//visibility:public"], 33 | ) 34 | -------------------------------------------------------------------------------- /blade/prototime/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | load("@rules_rust_prost//:defs.bzl", "rust_prost_library") 3 | 4 | rust_prost_library( 5 | name = "timestamp_proto", 6 | proto = "@protobuf//:timestamp_proto", 7 | visibility = ["//visibility:public"], 8 | ) 9 | 10 | rust_prost_library( 11 | name = "duration_proto", 12 | proto = "@protobuf//:duration_proto", 13 | visibility = ["//visibility:public"], 14 | ) 15 | 16 | rust_library( 17 | name = "prototime", 18 | srcs = [ 19 | "lib.rs", 20 | ], 21 | visibility = ["//visibility:public"], 22 | deps = [ 23 | 
":duration_proto", 24 | ":timestamp_proto", 25 | "@crate//:anyhow", 26 | ], 27 | ) 28 | 29 | rust_test( 30 | name = "prototime_test", 31 | size = "small", 32 | crate = ":prototime", 33 | ) 34 | -------------------------------------------------------------------------------- /blade/static/zoom-out.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 11 | 12 | -------------------------------------------------------------------------------- /blade/static/disconnect.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000100_add_trgm/up.sql: -------------------------------------------------------------------------------- 1 | -- Enable trigram matching extension (safe to IF NOT EXISTS on modern PG versions >=13) 2 | CREATE EXTENSION IF NOT EXISTS pg_trgm; 3 | 4 | -- GIN trigram indexes to accelerate substring / ILIKE searches: 5 | -- 1. tests.name: used in LIKE '%pattern%' search in search_test_names. 6 | CREATE INDEX IF NOT EXISTS tests_name_trgm_idx ON tests USING GIN (name gin_trgm_ops); 7 | 8 | -- 2. invocationoutput.line: used for log output filters with ILIKE / Contains. 9 | CREATE INDEX IF NOT EXISTS invocationoutput_line_trgm_idx ON invocationoutput USING GIN (line gin_trgm_ops); 10 | 11 | -- 3. options.keyval: used for Bazel flag / metadata contains searches (LIKE / ILIKE patterns). 12 | CREATE INDEX IF NOT EXISTS options_keyval_trgm_idx ON options USING GIN (keyval gin_trgm_ops); 13 | 14 | -- Note: retain existing btree indexes for equality and ordering; GIN complements them for pattern scans. 
15 | -------------------------------------------------------------------------------- /blade/bep/proto_registry/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "proto_registry", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | data = [ 9 | "//blade/bep/protos:bep_descriptors", 10 | "//blade/bep/protos:bes_descriptors", 11 | "@googleapis//google/bytestream:bytestream_proto", 12 | ], 13 | visibility = ["//visibility:public"], 14 | deps = [ 15 | "@crate//:anyhow", 16 | "@crate//:lazy_static", 17 | "@crate//:prost", 18 | "@crate//:prost-reflect", 19 | "@crate//:prost-types", 20 | "@crate//:walkdir", 21 | "@rules_rust//tools/runfiles", 22 | ], 23 | ) 24 | 25 | rust_test( 26 | name = "proto_registry_test", 27 | size = "small", 28 | crate = ":proto_registry", 29 | deps = [ 30 | "//blade/bep/protos:build_event_stream_rust_proto", 31 | "@crate//:serde_json", 32 | ], 33 | ) 34 | -------------------------------------------------------------------------------- /blade/static/zoom-in.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 8 | 12 | 13 | -------------------------------------------------------------------------------- /third_party/nix/jemalloc/BUILD.jemalloc: -------------------------------------------------------------------------------- 1 | load("@rules_cc//cc:defs.bzl", "cc_library") 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | filegroup( 6 | name = "bin", 7 | srcs = glob( 8 | ["bin/*"], 9 | allow_empty = True, 10 | ), 11 | ) 12 | 13 | filegroup( 14 | name = "lib", 15 | srcs = glob( 16 | [ 17 | "lib/**/*.so*", 18 | "lib/**/*.dylib", 19 | "lib/**/*.a", 20 | ], 21 | allow_empty = True, 22 | ), 23 | ) 24 | 25 | filegroup( 26 | name = "include", 27 | srcs = glob( 28 | [ 29 | "include/**/*.h", 30 | "include/**/*.hh", 31 | "include/**/*.hpp", 32 | 
"include/**/*.hxx", 33 | ], 34 | allow_empty = True, 35 | ), 36 | ) 37 | 38 | cc_library( 39 | name = "jemalloc", 40 | srcs = ["lib/libjemalloc_pic.a"], 41 | hdrs = [":include"], 42 | strip_include_prefix = "include", 43 | visibility = ["//visibility:public"], 44 | ) 45 | -------------------------------------------------------------------------------- /third_party/nix/cargo-bazel/default.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | stdenv, 4 | fetchFromGitHub, 5 | rustPlatform, 6 | }: 7 | rustPlatform.buildRustPackage rec { 8 | pname = "cargo-bazel"; 9 | version = "0.17.0"; 10 | 11 | src = fetchFromGitHub { 12 | owner = "bazelbuild"; 13 | repo = "rules_rust"; 14 | rev = "0.65.0"; 15 | hash = "sha256-Q1FenqIxp2Qy++cSu5RFdGzrQmqbCfKRZB1VR97v77Q="; 16 | }; 17 | 18 | cargoHash = "sha256-beOFmmeAK2cNANxacv4GfJqEptvqD1/CNJ+Mmunb7/Y="; 19 | 20 | sourceRoot = "source/crate_universe"; 21 | 22 | doCheck = false; 23 | 24 | buildNoDefaultFeatures = true; 25 | 26 | buildFeatures = ["cargo"]; 27 | 28 | meta = { 29 | description = "A collection of tools which use Cargo to generate build targets for Bazel."; 30 | homepage = "https://github.com/bazelbuild/rules_rust"; 31 | changelog = "https://github.com/bazelbuild/rules_rust/releases/tag/v${version}"; 32 | license = with lib.licenses; [ 33 | asl20 34 | ]; 35 | mainProgram = "cargo-bazel"; 36 | }; 37 | } 38 | -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/nix-toolchains.bazelrc: -------------------------------------------------------------------------------- 1 | # Enable rules-nixpkgs toolchain 2 | build --host_platform=@io_tweag_rules_nixpkgs//nixpkgs/platforms:host 3 | 4 | # Use nix-based CC 5 | build --cxxopt='-std=c++20' 6 | build --host_cxxopt='-std=c++20' 7 | build --extra_toolchains=//nix/cc:nix_cc_toolchain 8 | build --incompatible_enable_cc_toolchain_resolution 9 | 10 | # Use nix-based JVM 11 | build 
--host_platform=@io_tweag_rules_nixpkgs//nixpkgs/platforms:host 12 | build --java_runtime_version=nixpkgs_java_11 13 | build --tool_java_runtime_version=nixpkgs_java_11 14 | build --java_language_version=11 15 | build --tool_java_language_version=11 16 | 17 | # Ensure that we don't accidentally build protobuf or gRPC 18 | common --per_file_copt=external/.*protobuf.*@--PROTOBUF_WAS_NOT_SUPPOSED_TO_BE_BUILT 19 | common --host_per_file_copt=external/.*protobuf.*@--PROTOBUF_WAS_NOT_SUPPOSED_TO_BE_BUILT 20 | common --per_file_copt=external/.*grpc.*@--GRPC_WAS_NOT_SUPPOSED_TO_BE_BUILT 21 | common --host_per_file_copt=external/.*grpc.*@--GRPC_WAS_NOT_SUPPOSED_TO_BE_BUILT 22 | -------------------------------------------------------------------------------- /blade/shared/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library") 2 | 3 | rust_library( 4 | name = "shared", 5 | srcs = [ 6 | "lib.rs", 7 | ], 8 | crate_features = select({ 9 | "@platforms//cpu:wasm32": ["hydrate"], 10 | "//conditions:default": ["ssr"], 11 | }), 12 | rustc_env = { 13 | "SERVER_FN_OVERRIDE_KEY": "bazel", 14 | }, 15 | visibility = ["//visibility:public"], 16 | deps = select({ 17 | "@platforms//cpu:wasm32": [ 18 | "//blade/darkmode", 19 | "//blade/state", 20 | "@wasm_crate//:cfg-if", 21 | "@wasm_crate//:leptos", 22 | "@wasm_crate//:url", 23 | "@wasm_crate//:web-sys", 24 | ], 25 | "//conditions:default": [ 26 | "//blade/bytestream", 27 | "//blade/darkmode", 28 | "//blade/state", 29 | "@crate//:cfg-if", 30 | "@crate//:leptos", 31 | "@crate//:url", 32 | "@crate//:web-sys", 33 | ], 34 | }), 35 | ) 36 | -------------------------------------------------------------------------------- /blade/state/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library") 2 | 3 | rust_library( 4 | name = "state", 5 | srcs = [ 6 | "lib.rs", 7 | ], 
8 | crate_features = select({ 9 | "@platforms//cpu:wasm32": [], 10 | "//conditions:default": ["ssr"], 11 | }), 12 | proc_macro_deps = select({ 13 | "@platforms//cpu:wasm32": [], 14 | "//conditions:default": ["@crate//:derivative"], 15 | }), 16 | rustc_env = { 17 | "SERVER_FN_OVERRIDE_KEY": "bazel", 18 | }, 19 | visibility = ["//visibility:public"], 20 | deps = select({ 21 | "@platforms//cpu:wasm32": [ 22 | "@wasm_crate//:cfg-if", 23 | "@wasm_crate//:futures", 24 | "@wasm_crate//:serde", 25 | ], 26 | "//conditions:default": [ 27 | "//blade/bytestream", 28 | "@crate//:anyhow", 29 | "@crate//:cfg-if", 30 | "@crate//:futures", 31 | "@crate//:serde", 32 | "@crate//:tokio", 33 | ], 34 | }), 35 | ) 36 | -------------------------------------------------------------------------------- /third_party/nix/leptosfmt/default.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | stdenv, 4 | rustPlatform, 5 | fetchFromGitHub, 6 | }: 7 | rustPlatform.buildRustPackage rec { 8 | pname = "leptosfmt"; 9 | version = "8b4194ba33eee417ababdd15498940014fd6d237"; 10 | 11 | src = fetchFromGitHub { 12 | owner = "bram209"; 13 | repo = "leptosfmt"; 14 | rev = "8b4194ba33eee417ababdd15498940014fd6d237"; 15 | hash = "sha256-F06Ag99rCn3qZywdxyP7ULOgyhbSzWNe+drBDZJWVxo="; 16 | fetchSubmodules = true; 17 | }; 18 | 19 | cargoHash = "sha256-ihhEeOLNTHi0C8rGIvwiXJRiqIjWGTRRr7JLn6fMtNU="; 20 | 21 | RUSTFLAGS = lib.optionalString stdenv.isLinux "-C linker-features=-lld"; 22 | 23 | meta = with lib; { 24 | description = "Formatter for the leptos view! 
macro"; 25 | mainProgram = "leptosfmt"; 26 | homepage = "https://github.com/bram209/leptosfmt"; 27 | changelog = "https://github.com/bram209/leptosfmt/blob/${src.rev}/CHANGELOG.md"; 28 | license = with licenses; [ 29 | asl20 30 | mit 31 | ]; 32 | maintainers = with maintainers; [figsoda]; 33 | }; 34 | } 35 | -------------------------------------------------------------------------------- /blade/components/dashboard/pass_fail_chart.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use state::{Status, TestHistory}; 3 | 4 | use crate::charts::piechart::PieChart; 5 | // use chrono::prelude::*; 6 | 7 | #[allow(non_snake_case)] 8 | #[component] 9 | pub fn PassFailChart(history: TestHistory) -> impl IntoView { 10 | let (pass, fail): (Vec<_>, Vec<_>) = history 11 | .history 12 | .into_iter() 13 | .partition(|p| matches!(p.test.status, Status::Success)); 14 | let passed = pass.len(); 15 | let failed = fail.len(); 16 | 17 | view! { 18 | 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /blade/static/spinner.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/convenience.bazelrc: -------------------------------------------------------------------------------- 1 | # Output test errors to stderr so users don't have to `cat` or open test failure log files when test 2 | # fail. This makes the log noiser in exchange for reducing the time-to-feedback on test failures for 3 | # users. 4 | # Docs: https://bazel.build/docs/user-manual#test-output 5 | test --test_output=errors 6 | 7 | # Show the output files created by builds that requested more than one target. 
This helps users 8 | # locate the build outputs in more cases 9 | # Docs: https://bazel.build/docs/user-manual#show-result 10 | build --show_result=20 11 | 12 | # Bazel picks up host-OS-specific config lines from bazelrc files. For example, if the host OS is 13 | # Linux and you run bazel build, Bazel picks up lines starting with build:linux. Supported OS 14 | # identifiers are `linux`, `macos`, `windows`, `freebsd`, and `openbsd`. Enabling this flag is 15 | # equivalent to using `--config=linux` on Linux, `--config=windows` on Windows, etc. 16 | # Docs: https://bazel.build/reference/command-line-reference#flag--enable_platform_specific_config 17 | common --enable_platform_specific_config 18 | -------------------------------------------------------------------------------- /third_party/nix/BUILD.protobuf: -------------------------------------------------------------------------------- 1 | load("@rules_proto//proto:defs.bzl", "proto_library") 2 | 3 | GOOGLE_PROTOBUF_DEP_EDGES = { 4 | "any": [], 5 | "api": [ 6 | "source_context", 7 | "type", 8 | ], 9 | "compiler/plugin": ["descriptor"], 10 | "descriptor": [], 11 | "duration": [], 12 | "empty": [], 13 | "field_mask": [], 14 | "source_context": [], 15 | "struct": [], 16 | "timestamp": [], 17 | "type": [ 18 | "any", 19 | "source_context", 20 | ], 21 | "wrappers": [], 22 | } 23 | 24 | genrule( 25 | name = "protoc", 26 | srcs = ["bin/protoc"], 27 | outs = ["protoc.bin"], 28 | cmd = "cp $< $@", 29 | executable = True, 30 | visibility = ["//visibility:public"], 31 | ) 32 | 33 | [ 34 | proto_library( 35 | name = "%s_proto" % src, 36 | srcs = ["include/google/protobuf/%s.proto" % src], 37 | strip_import_prefix = "include", 38 | visibility = ["//visibility:public"], 39 | deps = [":%s_proto" % dep for dep in deps], 40 | ) 41 | for src, deps in GOOGLE_PROTOBUF_DEP_EDGES.items() 42 | ] 43 | -------------------------------------------------------------------------------- /blade/db/BUILD.bazel: 
-------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "db", 5 | srcs = glob(["**/*.rs"]), 6 | data = glob([ 7 | "sqlite/migrations/**", 8 | "postgres/migrations/**", 9 | ]), 10 | visibility = ["//visibility:public"], 11 | deps = [ 12 | "//blade/metrics", 13 | "//blade/prototime", 14 | "//blade/state", 15 | "@crate//:anyhow", 16 | "@crate//:diesel", 17 | "@crate//:diesel-tracing", 18 | "@crate//:diesel_migrations", 19 | "@crate//:lazy_static", 20 | "@crate//:prometheus-client", 21 | "@crate//:r2d2", 22 | "@crate//:serde", 23 | "@crate//:time", 24 | "@crate//:tokio", 25 | "@crate//:tracing", 26 | "@crate//:uuid", 27 | "@postgresql", 28 | "@rules_rust//tools/runfiles", 29 | "@sqlite", 30 | ], 31 | ) 32 | 33 | rust_test( 34 | name = "db_test", 35 | size = "small", 36 | crate = ":db", 37 | deps = [ 38 | "//postgres/harness", 39 | "@crate//:tempdir", 40 | ], 41 | ) 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 DolceTriade 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /blade/lib.rs: -------------------------------------------------------------------------------- 1 | #![recursion_limit = "256"] 2 | use cfg_if::cfg_if; 3 | 4 | // Needs to be in lib.rs AFAIK because wasm-bindgen needs us to be compiling a 5 | // lib. I may be wrong. 6 | cfg_if! { 7 | if #[cfg(feature = "hydrate")] { 8 | use wasm_bindgen::prelude::wasm_bindgen; 9 | use tracing_web::{MakeWebConsoleWriter}; 10 | use tracing_subscriber::prelude::*; 11 | use routes::app::App; 12 | 13 | #[wasm_bindgen] 14 | pub fn hydrate() { 15 | console_error_panic_hook::set_once(); 16 | let fmt_layer = tracing_subscriber::fmt::layer() 17 | .with_file(true) 18 | .with_line_number(true) 19 | .with_ansi(false) // Only partially supported across browsers 20 | .without_time() // std::time is not available in browsers, see note below 21 | .with_writer(MakeWebConsoleWriter::new()); // write events to the console 22 | tracing_subscriber::registry() 23 | .with(fmt_layer) 24 | .init(); 25 | leptos::mount::hydrate_body(App); 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /blade/components/dashboard/graphs.rs: -------------------------------------------------------------------------------- 1 | use leptos::{either::Either, prelude::*}; 2 | use state::TestHistory; 3 | 4 | use crate::dashboard::{duration_chart::DurationChart, pass_fail_chart::PassFailChart}; 5 | 6 
| #[allow(non_snake_case)] 7 | #[component] 8 | pub fn HistoryGraphs(history: TestHistory) -> impl IntoView { 9 | if history.history.is_empty() { 10 | return Either::Left( 11 | view! {

    "No history found for this test."

    }, 12 | ); 13 | } 14 | 15 | Either::Right(view! { 16 |
    17 |
    18 |

    "Pass/Fail History"

    19 | 20 |
    21 |
    22 |

    "Duration History (s)"

    23 | 24 |
    25 |
    26 | }) 27 | } 28 | -------------------------------------------------------------------------------- /blade/db/envscrub.rs: -------------------------------------------------------------------------------- 1 | #[allow(dead_code)] 2 | pub(crate) fn scrub(s: &str) -> String { 3 | if !s.starts_with("--client_env=") { 4 | return s.to_string(); 5 | } 6 | let Some(first) = s[0..].find('=') else { 7 | return s.to_string(); 8 | }; 9 | if first + 1 >= s.len() { 10 | return s.to_string(); 11 | } 12 | let Some(second) = s[first + 1..].find('=') else { 13 | return s.to_string(); 14 | }; 15 | if first + second + 2 >= s.len() { 16 | return s.to_string(); 17 | } 18 | format!("{}=", &s[..first + second + 1]) 19 | } 20 | 21 | #[cfg(test)] 22 | mod tests { 23 | use super::*; 24 | 25 | #[test] 26 | fn test_scrub() { 27 | assert_eq!(scrub("foo=bar"), "foo=bar"); 28 | assert_eq!(scrub("foo=bar=baz=qux"), "foo=bar=baz=qux"); 29 | assert_eq!(scrub(""), ""); 30 | assert_eq!(scrub("yo"), "yo"); 31 | assert_eq!(scrub("foo="), "foo="); 32 | assert_eq!(scrub("foo=bar="), "foo=bar="); 33 | assert_eq!( 34 | scrub("--client_env=foo=bar="), 35 | "--client_env=foo=" 36 | ); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /blade/static/number.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | number_sign [#110] 6 | Created with Sketch. 
7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /nix/rust/wasm_bindgen.nix: -------------------------------------------------------------------------------- 1 | let 2 | og = import {}; 3 | fenix = import {pkgs = og;}; 4 | pkgs = import { 5 | system = builtins.currentSystem; 6 | overlays = [ 7 | (self: super: { 8 | inherit fenix; 9 | }) 10 | ]; 11 | }; 12 | rust = with pkgs.fenix; 13 | with latest; 14 | combine [ 15 | cargo 16 | clippy 17 | rust-src 18 | rustc 19 | rustfmt 20 | targets.wasm32-unknown-unknown.latest.rust-std 21 | ]; 22 | rustPlatform = pkgs.makeRustPlatform { 23 | cargo = rust; 24 | rustc = rust; 25 | }; 26 | wasm-bindgen-cli' = pkgs.wasm-bindgen-cli.override { 27 | inherit rustPlatform; 28 | }; 29 | in 30 | wasm-bindgen-cli'.overrideAttrs (self: super: rec { 31 | version = "0.2.100"; 32 | src = with self; 33 | pkgs.fetchCrate { 34 | inherit pname; 35 | version = "0.2.100"; 36 | hash = "sha256-3RJzK7mkYFrs7C/WkhW9Rr4LdP5ofb2FdYGz1P7Uxog="; 37 | }; 38 | cargoDeps = super.cargoDeps.overrideAttrs (_: { 39 | inherit src; 40 | outputHash = "sha256-/8T0FGhPMQoUM5/M8lZkTGAc9ul+/Xe59xe0Z/l/RsI="; 41 | }); 42 | doCheck = false; 43 | }) 44 | -------------------------------------------------------------------------------- /nix/bazel/flatten.bzl: -------------------------------------------------------------------------------- 1 | """ 2 | Flatten is a helper to take a list of inputs (potentially in many directories), and output into a single directory. 
3 | """ 4 | 5 | def _flatten_impl(ctx): 6 | out = ctx.label.name 7 | if ctx.attr.outdir: 8 | out = ctx.attr.outdir 9 | d = ctx.actions.declare_directory(out) 10 | ctx.actions.run_shell( 11 | outputs = [d], 12 | inputs = ctx.files.srcs, 13 | arguments = [d.path] + [x.path for x in ctx.files.srcs], 14 | mnemonic = "Flatten", 15 | command = """ 16 | out=$1 17 | mkdir -p $out 18 | shift 19 | while (( $# )); do 20 | cp $1 $out 21 | shift 22 | done 23 | """, 24 | ) 25 | return DefaultInfo(files = depset([d])) 26 | 27 | flatten = rule( 28 | implementation = _flatten_impl, 29 | attrs = { 30 | "outdir": attr.string( 31 | default = "", 32 | doc = "Output directory. If empty, uses the label name.", 33 | ), 34 | "srcs": attr.label_list( 35 | allow_files = True, 36 | doc = "List of inputs to flatten.", 37 | ), 38 | }, 39 | ) 40 | -------------------------------------------------------------------------------- /third_party/rust/patches/prost-build/0001-Allow-substitution-for-the-message-type-in-type-attr.patch: -------------------------------------------------------------------------------- 1 | From b3a9f8a9cd4bf4113d6477ae6e6ed74d4667707b Mon Sep 17 00:00:00 2001 2 | From: Harsh Modi 3 | Date: Fri, 24 Nov 2023 12:16:03 -0800 4 | Subject: [PATCH] Allow substitution for the message type in type attributes 5 | 6 | --- 7 | src/code_generator.rs | 6 +++++- 8 | 1 file changed, 5 insertions(+), 1 deletion(-) 9 | 10 | diff --git a/src/code_generator.rs b/src/code_generator.rs 11 | index 2a4d241..e7ea3c2 100644 12 | --- a/src/code_generator.rs 13 | +++ b/src/code_generator.rs 14 | @@ -266,7 +266,11 @@ impl<'a> CodeGenerator<'a> { 15 | assert_eq!(b'.', fq_message_name.as_bytes()[0]); 16 | for attribute in self.config.type_attributes.get(fq_message_name) { 17 | push_indent(self.buf, self.depth); 18 | - self.buf.push_str(attribute); 19 | + if attribute.contains("{MESSAGE}") { 20 | + self.buf.push_str(&attribute.replace("{MESSAGE}", fq_message_name)); 21 | + } else { 22 | + 
self.buf.push_str(attribute); 23 | + } 24 | self.buf.push('\n'); 25 | } 26 | } 27 | -- 28 | 2.39.2 (Apple Git-143) 29 | 30 | -------------------------------------------------------------------------------- /nix/rust/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:toolchain.bzl", "rust_analyzer_toolchain", "rustfmt_toolchain") 2 | load("@rules_rust_wasm_bindgen//:defs.bzl", "rust_wasm_bindgen_toolchain") 3 | 4 | rust_wasm_bindgen_toolchain( 5 | name = "wasm_bindgen_toolchain_impl", 6 | wasm_bindgen_cli = "@wasm-bindgen-cli", 7 | ) 8 | 9 | toolchain( 10 | name = "wasm_bindgen_toolchain", 11 | toolchain = "wasm_bindgen_toolchain_impl", 12 | toolchain_type = "@rules_rust_wasm_bindgen//:toolchain_type", 13 | ) 14 | 15 | rust_analyzer_toolchain( 16 | name = "rust_analyzer_toolchain_impl", 17 | proc_macro_srv = "@nix_rust//:rust-analyzer-proc-macro-srv", 18 | rustc = "@nix_rust//:rustc", 19 | rustc_srcs = "@nix_rust//rustc_src", 20 | ) 21 | 22 | toolchain( 23 | name = "rust_analyzer_toolchain", 24 | toolchain = "rust_analyzer_toolchain_impl", 25 | toolchain_type = "@rules_rust//rust/rust_analyzer:toolchain_type", 26 | ) 27 | 28 | rustfmt_toolchain( 29 | name = "rustfmt_toolchain_impl", 30 | rustc = "@nix_rust//:rustc", 31 | rustc_lib = "@nix_rust//:rustc_lib", 32 | rustfmt = "@nix_rust//:rustfmt", 33 | ) 34 | 35 | toolchain( 36 | name = "rustfmt_toolchain", 37 | toolchain = "rustfmt_toolchain_impl", 38 | toolchain_type = "@rules_rust//rust/rustfmt:toolchain_type", 39 | ) 40 | -------------------------------------------------------------------------------- /blade/bep/buildtoollogs.rs: -------------------------------------------------------------------------------- 1 | use build_event_stream_proto::build_event_stream; 2 | 3 | use crate::EventHandler; 4 | 5 | pub struct Handler {} 6 | 7 | impl EventHandler for Handler { 8 | fn handle_event( 9 | &self, 10 | db_mgr: &dyn state::DBManager, 11 | 
invocation_id: &str, 12 | event: &build_event_stream::BuildEvent, 13 | ) -> anyhow::Result<()> { 14 | if let Some(build_event_stream::build_event::Payload::BuildToolLogs(logs)) = &event.payload 15 | { 16 | // Look for command.profile.gz in the log files 17 | for log in &logs.log { 18 | if log.name == "command.profile.gz" 19 | && let Some(build_event_stream::file::File::Uri(uri)) = &log.file 20 | { 21 | // Update the invocation with the profile URI 22 | let mut db = db_mgr.get()?; 23 | let uri2 = uri.clone(); 24 | db.update_shallow_invocation( 25 | invocation_id, 26 | Box::new(move |i: &mut state::InvocationResults| { 27 | i.profile_uri = Some(uri2); 28 | Ok(()) 29 | }), 30 | )?; 31 | break; 32 | } 33 | } 34 | } 35 | Ok(()) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /third_party/rust/patches/rules_rust/0001-Revert-Update-runfiles-to-work-with-directory-embedd.patch: -------------------------------------------------------------------------------- 1 | From 621bd86d299b5ae0b9a2f2941ad93e6ff0724cb5 Mon Sep 17 00:00:00 2001 2 | From: DolceTriade 3 | Date: Sat, 3 May 2025 23:31:21 -0700 4 | Subject: [PATCH] Revert "Update runfiles to work with directory embedded 5 | manifests (#3399)" 6 | 7 | This reverts commit 8de8f2b89f71819c1d226ff0b87a62bdb3f13c32. 8 | --- 9 | rust/runfiles/runfiles.rs | 7 +------ 10 | 1 file changed, 1 insertion(+), 6 deletions(-) 11 | 12 | diff --git a/rust/runfiles/runfiles.rs b/rust/runfiles/runfiles.rs 13 | index e0568ae7..fe8c6282 100644 14 | --- a/rust/runfiles/runfiles.rs 15 | +++ b/rust/runfiles/runfiles.rs 16 | @@ -158,12 +158,7 @@ impl Runfiles { 17 | let mode = if let Some(manifest_file) = std::env::var_os(MANIFEST_FILE_ENV_VAR) { 18 | Self::create_manifest_based(Path::new(&manifest_file))? 
19 | } else { 20 | - let dir = find_runfiles_dir()?; 21 | - let manifest_path = dir.join("MANIFEST"); 22 | - match manifest_path.exists() { 23 | - true => Self::create_manifest_based(&manifest_path)?, 24 | - false => Mode::DirectoryBased(dir), 25 | - } 26 | + Mode::DirectoryBased(find_runfiles_dir()?) 27 | }; 28 | 29 | let repo_mapping = raw_rlocation(&mode, "_repo_mapping") 30 | -- 31 | 2.49.0 32 | 33 | -------------------------------------------------------------------------------- /blade/darkmode/lib.rs: -------------------------------------------------------------------------------- 1 | // Dark mode utilities and state management 2 | use cfg_if::cfg_if; 3 | 4 | /// Dark mode state shared between components and routes 5 | #[derive(Clone, Copy, Debug)] 6 | pub struct DarkMode(pub bool); 7 | 8 | const BLADE_LOCALSTORAGE_KEY: &str = "blade_dark_mode"; 9 | 10 | #[cfg(feature = "hydrate")] 11 | use leptos::tachys::dom::window; 12 | 13 | fn storage() -> Option { 14 | cfg_if! { 15 | if #[cfg(feature = "hydrate")] { 16 | window().local_storage().ok().flatten() 17 | } else { 18 | None 19 | } 20 | } 21 | } 22 | 23 | fn is_system_dark_mode() -> bool { 24 | cfg_if! { 25 | if #[cfg(feature = "hydrate")] { 26 | window() 27 | .match_media("(prefers-color-scheme: dark)") 28 | .ok() 29 | .flatten() 30 | .map(|x| x.matches()) 31 | .unwrap_or(false) 32 | } else { 33 | false 34 | } 35 | } 36 | } 37 | 38 | pub fn get() -> bool { 39 | storage() 40 | .and_then(|storage| storage.get_item(BLADE_LOCALSTORAGE_KEY).ok().flatten()) 41 | .and_then(|v| v.parse().ok()) 42 | .unwrap_or_else(is_system_dark_mode) 43 | } 44 | 45 | pub fn set(is_dark: bool) -> Result<(), String> { 46 | storage() 47 | .ok_or("no storage")? 
48 | .set_item(BLADE_LOCALSTORAGE_KEY, &is_dark.to_string()) 49 | .map_err(|e| format!("{e:?}")) 50 | } 51 | -------------------------------------------------------------------------------- /blade/metrics/lib.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Mutex; 2 | 3 | use anyhow::Context; 4 | use lazy_static::lazy_static; 5 | use prometheus_client::{encoding::text::encode, registry::Metric}; 6 | 7 | lazy_static! { 8 | static ref REGISTRY: Mutex = 9 | Mutex::new(prometheus_client::registry::Registry::default()); 10 | } 11 | 12 | pub fn register_metric(name: N, help: H, metric: M) -> M 13 | where 14 | N: Into, 15 | H: Into, 16 | M: Metric + Clone, 17 | { 18 | let mut r = REGISTRY.lock().unwrap(); 19 | r.register(name, help, metric.clone()); 20 | metric 21 | } 22 | 23 | pub fn openmetrics_string() -> anyhow::Result { 24 | let mut ret: String = "".to_string(); 25 | let r = REGISTRY.lock().unwrap(); 26 | encode(&mut ret, &r) 27 | .map(|_| ret) 28 | .context("failed to generate metrics") 29 | } 30 | 31 | #[cfg(test)] 32 | mod test { 33 | use crate::{openmetrics_string, register_metric}; 34 | 35 | #[test] 36 | fn test_register() { 37 | let c = register_metric( 38 | "metric", 39 | "help", 40 | prometheus_client::metrics::counter::Counter::::default(), 41 | ); 42 | let enc1 = openmetrics_string().unwrap(); 43 | assert!(enc1.contains("metric_total 0")); 44 | c.inc(); 45 | let enc2 = openmetrics_string().unwrap(); 46 | assert!(enc2.contains("metric_total 1")); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /blade/bep/print_event.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use build_event_stream_proto::build_event_stream; 4 | use prost_reflect::ReflectMessage; 5 | use regex::Regex; 6 | use state::DBManager; 7 | pub(crate) struct Handler { 8 | pub message_re: Arc>, 9 | } 10 | 11 | impl 
crate::EventHandler for Handler { 12 | fn handle_event( 13 | &self, 14 | _db_mgr: &dyn DBManager, 15 | _invocation_id: &str, 16 | event: &build_event_stream::BuildEvent, 17 | ) -> anyhow::Result<()> { 18 | let re = self.message_re.lock().unwrap(); 19 | if re.as_str().is_empty() { 20 | return Ok(()); 21 | } 22 | let desc = event.descriptor(); 23 | let mut dm: prost_reflect::DynamicMessage = event.transcode_to_dynamic(); 24 | let oneof = match desc.oneofs().next() { 25 | None => { 26 | return Ok(()); 27 | }, 28 | Some(o) => o, 29 | }; 30 | let _ = oneof.fields().try_for_each(|f| { 31 | if dm.has_field(&f) && re.is_match(f.field_descriptor_proto().type_name()) { 32 | let type_name = f.field_descriptor_proto().name(); 33 | dm.clear_field_by_name("children"); 34 | let j = serde_json::ser::to_string(&dm).map_err(|_| ())?; 35 | tracing::info!(type_name, "{}", j); 36 | return Err(()); 37 | } 38 | Ok(()) 39 | }); 40 | Ok(()) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-11-19-000000_unique_test_names/up.sql: -------------------------------------------------------------------------------- 1 | -- Create unique_test_names table to optimize autocomplete searches 2 | CREATE TABLE IF NOT EXISTS unique_test_names ( 3 | name TEXT NOT NULL PRIMARY KEY 4 | ); 5 | 6 | -- Create GIN index on name for LIKE '%pattern%' searches 7 | CREATE INDEX IF NOT EXISTS unique_test_names_trgm_idx ON unique_test_names USING GIN (name gin_trgm_ops); 8 | 9 | -- Create trigger function to maintain unique_test_names when tests table changes 10 | CREATE OR REPLACE FUNCTION maintain_unique_test_names() 11 | RETURNS TRIGGER AS $$ 12 | BEGIN 13 | IF TG_OP = 'INSERT' OR TG_OP = 'UPDATE' THEN 14 | -- Insert new test name if it doesn't exist 15 | INSERT INTO unique_test_names (name) 16 | VALUES (NEW.name) 17 | ON CONFLICT (name) DO NOTHING; 18 | RETURN NEW; 19 | ELSIF TG_OP = 'DELETE' THEN 20 | -- Remove test name only if 
no other tests have this name 21 | DELETE FROM unique_test_names 22 | WHERE name = OLD.name 23 | AND NOT EXISTS (SELECT 1 FROM tests WHERE name = OLD.name); 24 | RETURN OLD; 25 | END IF; 26 | RETURN NULL; 27 | END; 28 | $$ LANGUAGE plpgsql; 29 | 30 | -- Create trigger on tests table to maintain unique_test_names 31 | DROP TRIGGER IF EXISTS maintain_unique_test_names_trigger ON tests; 32 | CREATE TRIGGER maintain_unique_test_names_trigger 33 | AFTER INSERT OR UPDATE OR DELETE ON tests 34 | FOR EACH ROW 35 | EXECUTE FUNCTION maintain_unique_test_names(); 36 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-06-21-082528_newoutput/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` 2 | -- Step 1: Add the 'output' column back to Invocations 3 | -- Note: SQLite does not support ADD COLUMN NOT NULL without a default value 4 | -- for existing rows. If you want it NOT NULL, you usually need to recreate the table. 5 | -- Assuming original 'output' was NOT NULL based on your schema. 6 | -- This will add a nullable 'output' column initially. 7 | ALTER TABLE Invocations ADD COLUMN output TEXT; 8 | 9 | -- Step 2: Migrate data back from InvocationOutput to Invocations 10 | -- This aggregates lines back into a single TEXT block, ordered by ID. 11 | -- SQLite's GROUP_CONCAT is used for aggregation. 12 | UPDATE Invocations 13 | SET output = ( 14 | SELECT GROUP_CONCAT(line, X'0A') -- X'0A' is the hex representation for newline character 15 | FROM InvocationOutput 16 | WHERE InvocationOutput.invocation_id = Invocations.id 17 | ORDER BY InvocationOutput.id -- Ensure order is preserved 18 | ); 19 | 20 | -- Handle cases where an invocation might not have any output entries in InvocationOutput. 
/// Merges two Tailwind class strings, with classes from `add` overriding
/// classes from `orig` that share the same prefix (the text before the last
/// `-`, e.g. `m-1` and `m-2` share prefix `m`; `hover:m-3` has prefix
/// `hover:m`). Classes without a `-` are keyed by themselves. The result is
/// space-joined in key-sorted (BTreeMap) order.
pub fn tailwind_merge(orig: &str, add: &str) -> String {
    let mut classes: BTreeMap<&str, &str> = BTreeMap::new();
    // Insert `orig` first, then `add`, so later entries win on prefix clashes.
    for class in orig
        .split_ascii_whitespace()
        .chain(add.split_ascii_whitespace())
    {
        let key = match class.rsplit_once('-') {
            Some((prefix, _)) => prefix,
            None => class,
        };
        classes.insert(key, class);
    }
    classes.into_values().collect::<Vec<_>>().join(" ")
}
-------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` 2 | -- Migration to Undo Previous Changes 3 | 4 | -- Step 1: Add the 'output' column back to Invocations 5 | ALTER TABLE Invocations 6 | ADD COLUMN output TEXT NOT NULL DEFAULT ''; -- Add NOT NULL and a default empty string for new rows 7 | 8 | -- Important Note for Step 1: 9 | -- If your 'output' column previously allowed NULLs, adjust the above line: 10 | -- ALTER TABLE Invocations 11 | -- ADD COLUMN output TEXT; 12 | -- You might also want to set a default value for existing rows that will get NULL initially. 13 | -- For simplicity and assuming previous 'output' was NOT NULL, we use NOT NULL DEFAULT ''. 14 | 15 | -- Step 2: Migrate data back from InvocationOutput to Invocations 16 | -- This aggregates the lines back into a single text block, preserving order. 17 | UPDATE Invocations inv 18 | SET output = ( 19 | SELECT STRING_AGG(io.line, E'\n' ORDER BY io.id) 20 | FROM InvocationOutput io 21 | WHERE io.invocation_id = inv.id 22 | ) 23 | WHERE EXISTS (SELECT 1 FROM InvocationOutput io WHERE io.invocation_id = inv.id); 24 | 25 | -- Handle cases where an invocation might have had no output (if original 'output' could be empty/null) 26 | -- If an invocation had no entries in InvocationOutput, its 'output' column would remain ''. 
27 | -- If you need to revert to NULL for such cases, you might do: 28 | -- UPDATE Invocations SET output = NULL WHERE output = ''; -- Only if 'output' was nullable 29 | 30 | -- Step 3: Drop the InvocationOutput table 31 | DROP TABLE IF EXISTS InvocationOutput; 32 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-11-19-000000_unique_test_names/up.sql: -------------------------------------------------------------------------------- 1 | -- Create unique_test_names table to optimize autocomplete searches 2 | CREATE TABLE IF NOT EXISTS unique_test_names ( 3 | name TEXT NOT NULL PRIMARY KEY 4 | ); 5 | 6 | -- Create index on name for LIKE '%pattern%' searches 7 | CREATE INDEX IF NOT EXISTS unique_test_names_idx ON unique_test_names (name); 8 | 9 | -- Create trigger function to maintain unique_test_names when tests table changes 10 | -- SQLite uses INSTEAD OF triggers for insert/update/delete, but since we're triggering AFTER, 11 | -- we need separate triggers for each operation. 
12 | 13 | -- Trigger on INSERT: add new test name if it doesn't exist 14 | CREATE TRIGGER IF NOT EXISTS maintain_unique_test_names_insert 15 | AFTER INSERT ON tests 16 | FOR EACH ROW 17 | WHEN NEW.name NOT IN (SELECT name FROM unique_test_names) 18 | BEGIN 19 | INSERT INTO unique_test_names (name) 20 | VALUES (NEW.name); 21 | END; 22 | 23 | -- Trigger on UPDATE: add updated test name if it doesn't exist 24 | CREATE TRIGGER IF NOT EXISTS maintain_unique_test_names_update 25 | AFTER UPDATE ON tests 26 | FOR EACH ROW 27 | WHEN NEW.name NOT IN (SELECT name FROM unique_test_names) 28 | BEGIN 29 | INSERT INTO unique_test_names (name) 30 | VALUES (NEW.name); 31 | END; 32 | 33 | -- Trigger on DELETE: remove test name only if no other tests have this name 34 | CREATE TRIGGER IF NOT EXISTS maintain_unique_test_names_delete 35 | AFTER DELETE ON tests 36 | FOR EACH ROW 37 | BEGIN 38 | DELETE FROM unique_test_names 39 | WHERE name = OLD.name 40 | AND NOT EXISTS (SELECT 1 FROM tests WHERE name = OLD.name); 41 | END; 42 | -------------------------------------------------------------------------------- /blade/components/tooltip.rs: -------------------------------------------------------------------------------- 1 | use leptos::{html, prelude::*}; 2 | 3 | #[allow(non_snake_case)] 4 | #[component] 5 | pub fn Tooltip( 6 | children: Children, 7 | tooltip: F, 8 | #[prop(optional)] offset_x: f64, 9 | #[prop(optional)] offset_y: f64, 10 | ) -> impl IntoView 11 | where 12 | F: Fn() -> IV, 13 | IV: IntoView, 14 | { 15 | let tel = NodeRef::::new(); 16 | let hover = move |_| { 17 | if let Some(el) = tel.get() { 18 | el.parent_element() 19 | .map(|s| { 20 | let body = document().body().unwrap().get_bounding_client_rect(); 21 | (body, s.get_bounding_client_rect()) 22 | }) 23 | .map(|rects| { 24 | let top = rects.1.y() - rects.0.y() + offset_y; 25 | // arbitrary offset to make things line up better. 
26 | let left = rects.1.x() - rects.0.x() + offset_x - 3.0; 27 | el.set_attribute("style", &format!("top: {top}px; left: {left}px;")) 28 | .ok() 29 | }) 30 | .unwrap(); 31 | } 32 | }; 33 | view! { 34 |
    35 | 39 | {tooltip()} 40 | 41 | {children()} 42 |
    43 | } 44 | } 45 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2025-06-21-082528_newoutput/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | -- Create the new InvocationOutput table 3 | CREATE TABLE InvocationOutput ( 4 | id INTEGER PRIMARY KEY AUTOINCREMENT, 5 | invocation_id TEXT NOT NULL, 6 | line TEXT NOT NULL, 7 | -- Add a foreign key constraint. 8 | -- SQLite's foreign key constraints are only enforced if PRAGMA foreign_keys = ON; 9 | FOREIGN KEY (invocation_id) REFERENCES Invocations(id) ON DELETE CASCADE 10 | ); 11 | 12 | -- Copy data from the old 'output' column into the new 'InvocationOutput' table. 13 | -- SQLite doesn't have UNNEST or STRING_TO_ARRAY directly. 14 | -- This part is the trickiest in pure SQL for SQLite. 15 | -- For actual data splitting, you would typically do this in application code (Rust). 16 | -- For this SQL migration, we'll assume a simpler approach or that `output` 17 | -- was single-line if this pure SQL migration were to be runnable. 18 | -- 19 | -- HOWEVER, since you are using a multi-line `output` column, doing this 20 | -- in pure SQLite SQL with `STRING_TO_ARRAY` is NOT feasible. 21 | -- 22 | -- The most practical way to handle multi-line splitting during a SQLite migration 23 | -- is to use application code (e.g., Rust with Diesel). 
24 | -- 25 | -- But if we HAD to do it in SQL and assume output is just one line per `Invocation` 26 | -- or we put the *entire* output into one `InvocationOutput` line, it would be: 27 | INSERT INTO InvocationOutput (invocation_id, line) 28 | SELECT id, output FROM Invocations; 29 | 30 | -- Drop the 'output' column from Invocations 31 | -- SQLite's ALTER TABLE DROP COLUMN is supported in newer versions (3.35.0+) 32 | ALTER TABLE Invocations DROP COLUMN output; 33 | -------------------------------------------------------------------------------- /blade/components/searchbar.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use web_sys::KeyboardEvent; 3 | 4 | #[allow(non_snake_case)] 5 | #[component] 6 | pub fn Searchbar( 7 | #[prop(optional, into)] id: String, 8 | #[prop(optional, into)] placeholder: String, 9 | keyup: F, 10 | ) -> impl IntoView 11 | where 12 | F: Fn(KeyboardEvent) + 'static, 13 | { 14 | view! { 15 |
    16 |
    17 | 32 |
    33 | 41 |
    42 | } 43 | } 44 | -------------------------------------------------------------------------------- /blade/routes/empty.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | 3 | #[component] 4 | pub fn Empty() -> impl IntoView { 5 | view! { 6 |
    7 |
    8 | 17 | 18 | 19 | 20 | 27 | 32 | 33 | 34 | 35 | 36 |
    37 |

    Missing Invocation ID

    38 |
    39 | } 40 | } 41 | -------------------------------------------------------------------------------- /blade/bep/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test") 2 | 3 | rust_library( 4 | name = "bep", 5 | srcs = [ 6 | "buildinfo.rs", 7 | "buildtoollogs.rs", 8 | "lib.rs", 9 | "options.rs", 10 | "print_event.rs", 11 | "progress.rs", 12 | "session.rs", 13 | "target.rs", 14 | ], 15 | data = [ 16 | "//blade/bep/protos:bep_descriptors", 17 | "//blade/bep/protos:bes_descriptors", 18 | ], 19 | visibility = ["//visibility:public"], 20 | deps = [ 21 | "//blade/bep/proto_registry", 22 | "//blade/bep/protos:bep_proto", 23 | "//blade/bep/protos:build_event_stream_rust_proto", 24 | "//blade/bep/protos:empty_proto", 25 | "//blade/db", 26 | "//blade/metrics", 27 | "//blade/prototime", 28 | "//blade/prototime:duration_proto", 29 | "//blade/prototime:timestamp_proto", 30 | "//blade/state", 31 | "@crate//:anyhow", 32 | "@crate//:async-stream", 33 | "@crate//:futures", 34 | "@crate//:lazy_static", 35 | "@crate//:prometheus-client", 36 | "@crate//:prost", 37 | "@crate//:prost-reflect", 38 | "@crate//:prost-types", 39 | "@crate//:regex", 40 | "@crate//:scopeguard", 41 | "@crate//:serde", 42 | "@crate//:serde_json", 43 | "@crate//:tokio", 44 | "@crate//:tokio-stream", 45 | "@crate//:tonic", 46 | "@crate//:tonic-reflection", 47 | "@crate//:tracing", 48 | "@crate//:walkdir", 49 | "@rules_rust//tools/runfiles", 50 | ], 51 | ) 52 | 53 | rust_test( 54 | name = "bep_test", 55 | size = "small", 56 | crate = ":bep", 57 | ) 58 | -------------------------------------------------------------------------------- /third_party/rust/patches/diesel-tracing/fix-pgmetadata.patch: -------------------------------------------------------------------------------- 1 | From ac13a0fc464323c95d8e11a57e182be155a05922 Mon Sep 17 00:00:00 2001 2 | From: Chris Portela 3 | Date: Fri, 1 Mar 2024 16:54:43 
+0000 4 | Subject: [PATCH 1/2] Fix type for PgMetadataLookup for r2d2 support 5 | 6 | --- 7 | src/pg.rs | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/src/pg.rs b/src/pg.rs 11 | index bddbcc5..bf8ffa0 100644 12 | --- a/src/pg.rs 13 | +++ b/src/pg.rs 14 | @@ -61,7 +61,7 @@ impl MultiConnectionHelper for InstrumentedPgConnection { 15 | ) -> Option<&mut ::MetadataLookup> { 16 | lookup 17 | .downcast_mut::() 18 | - .map(|conn| conn as &mut dyn super::PgMetadataLookup) 19 | + .map(|conn| conn as &mut dyn diesel::pg::PgMetadataLookup) 20 | } 21 | } 22 | 23 | 24 | From d78907704d8575c156fc3341aa67ab92a57aa2af Mon Sep 17 00:00:00 2001 25 | From: Chris Portela 26 | Date: Fri, 1 Mar 2024 17:22:37 +0000 27 | Subject: [PATCH 2/2] Fix missing sql_function import 28 | 29 | --- 30 | src/pg.rs | 2 +- 31 | 1 file changed, 1 insertion(+), 1 deletion(-) 32 | 33 | diff --git a/src/pg.rs b/src/pg.rs 34 | index bf8ffa0..c106282 100644 35 | --- a/src/pg.rs 36 | +++ b/src/pg.rs 37 | @@ -14,7 +14,7 @@ use diesel::query_dsl::{LoadQuery, UpdateAndFetchResults}; 38 | use diesel::r2d2::R2D2Connection; 39 | use diesel::result::{ConnectionError, ConnectionResult, QueryResult}; 40 | use diesel::{select, Table}; 41 | -use diesel::{sql_query, RunQueryDsl}; 42 | +use diesel::{sql_query, sql_function, RunQueryDsl}; 43 | use tracing::{debug, field, instrument}; 44 | 45 | // https://www.postgresql.org/docs/12/functions-info.html 46 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2024" 2 | # Required for options that are unstable 3 | unstable_features = true 4 | 5 | # Separates control expression from their function calls 6 | combine_control_expr = false 7 | 8 | # Replaces consecutive underscore variables into a single .. 
within tuple patterns 9 | condense_wildcard_suffixes = true 10 | 11 | # Puts single-expression functions on a single line 12 | fn_single_line = true 13 | 14 | # Formats code snippets in doc comments 15 | format_code_in_doc_comments = true 16 | 17 | # Format metavariables in macro declarations 18 | format_macro_matchers = true 19 | 20 | # Reorganizes imports into three groups: std, external, and crate 21 | group_imports = "StdExternalCrate" 22 | 23 | # Force uppercase hex literals 24 | hex_literal_case = "Upper" 25 | 26 | # Merges imports from the same crate 27 | imports_granularity = "Crate" 28 | 29 | # Forces the style of items inside an imports block 30 | imports_layout = "HorizontalVertical" 31 | 32 | # Put a trailing comma after a block based match arm 33 | match_block_trailing_comma = true 34 | 35 | # Converts /* */ comments to // comments where possible 36 | normalize_comments = true 37 | 38 | # Converts #![doc = "..."] attributes to /// comments where possible 39 | normalize_doc_attributes = true 40 | 41 | # Reoders impl items. `type` and `const` are grouped together, then macros and functions 42 | reorder_impl_items = true 43 | 44 | # Ensures that imports (and extern crate statements) are sorted alphabetically 45 | reorder_imports = true 46 | 47 | # Use fields init shorthand when possible 48 | use_field_init_shorthand = true 49 | 50 | # Replaces try! macro with ? 
operator 51 | use_try_shorthand = true 52 | 53 | # Breaks comments to fit within the maximum line width (80 characters) 54 | wrap_comments = true 55 | 56 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000000_add_indexes/up.sql: -------------------------------------------------------------------------------- 1 | -- Add performance indexes based on observed query patterns 2 | -- NOTE: existing indexes: Targets(invocation_id), Tests(invocation_id), Options(invocation_id), invocationoutput(invocation_id) 3 | -- We avoid duplicates. 4 | 5 | -- 1. Frequently ordered & ranged by start time (test history, pruning old invocations) 6 | CREATE INDEX IF NOT EXISTS invocations_start_idx ON invocations (start DESC); 7 | 8 | -- 2. Retrieval of test history filters by tests.name = $1 then joins invocations and orders by invocations.start 9 | -- A btree index on name accelerates equality lookups. 10 | CREATE INDEX IF NOT EXISTS tests_name_idx ON tests (name); 11 | 12 | -- 3. TestRuns always accessed via belonging_to(tests) i.e. test_id equality 13 | CREATE INDEX IF NOT EXISTS testruns_test_id_idx ON testruns (test_id); 14 | 15 | -- 4. TestArtifacts often gathered per invocation (filter invocation_id = $1) 16 | CREATE INDEX IF NOT EXISTS testartifacts_inv_id_idx ON testartifacts (invocation_id); 17 | 18 | -- 5. Options filtered by (kind='Build Metadata') AND keyval eq/like patterns for metadata & flag queries. 19 | -- Separate partial index for Build Metadata equality; general keyval index for flag lookups with prefix patterns. 20 | CREATE INDEX IF NOT EXISTS options_keyval_idx ON options (keyval); 21 | CREATE INDEX IF NOT EXISTS options_build_metadata_keyval_idx ON options (keyval) WHERE kind = 'Build Metadata'; 22 | 23 | -- 6. InvocationOutput: fetch lines by invocation ordered by id asc; deletes last N lines by ordering id desc. 
24 | -- Existing single-column index on invocation_id helps filter but not ordering; composite improves order + limit scans. 25 | CREATE INDEX IF NOT EXISTS invocationoutput_inv_id_id_idx ON invocationoutput (invocation_id, id DESC); 26 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2025-09-16-000200_fix_large_keyval_indexes/up.sql: -------------------------------------------------------------------------------- 1 | -- Mitigate oversized btree index tuples on options.keyval 2 | -- Strategy: 3 | -- 1. Drop problematic wide btree indexes (options_keyval_idx, options_build_metadata_keyval_idx) 4 | -- 2. Add hash index for equality (fast, constant-length hash values) 5 | -- 3. Add functional btree index on left(keyval, 200) to support prefix lookups and reduce tuple size 6 | -- 4. Retain trigram GIN (if created) for ILIKE/substring searches 7 | -- 5. Add partial hash index for Build Metadata equality queries (smaller, improves branch/commit lookups) 8 | -- Notes: 9 | -- * Hash indexes are WAL-logged and crash-safe since PG 10; acceptable here. 10 | -- * left(keyval, 200) assumes relevant distinguishing prefix within first 200 chars. Adjust if needed. 11 | -- * For LIKE 'foo%' queries planner can use btree(left(keyval,200)) if foo length <= 200. 12 | -- * For exact equality, planner can choose hash index or trigram (usually hash). 
13 | 14 | -- Drop old wide btree indexes if they exist (avoid errors if already failed creation) 15 | DROP INDEX IF EXISTS options_build_metadata_keyval_idx; 16 | DROP INDEX IF EXISTS options_keyval_idx; 17 | 18 | -- Ensure pg_trgm extension is present if relying on trigram search (no-op if exists) 19 | CREATE EXTENSION IF NOT EXISTS pg_trgm; 20 | 21 | -- Equality hash index on full keyval 22 | CREATE INDEX IF NOT EXISTS options_keyval_hash_idx ON options USING HASH (keyval); 23 | 24 | -- Partial equality hash index for build metadata subset 25 | CREATE INDEX IF NOT EXISTS options_build_metadata_keyval_hash_idx ON options USING HASH (keyval) WHERE kind='Build Metadata'; 26 | 27 | -- Prefix btree index on first 200 characters (adjust length if needed) 28 | CREATE INDEX IF NOT EXISTS options_keyval_prefix_idx ON options (left(keyval, 200)); 29 | -------------------------------------------------------------------------------- /prost/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_proto//proto:proto_toolchain.bzl", "proto_toolchain") 2 | load("@rules_rust//rust:defs.bzl", "rust_library_group") 3 | load("@rules_rust_prost//:defs.bzl", "rust_prost_toolchain") 4 | 5 | rust_library_group( 6 | name = "prost_runtime", 7 | deps = [ 8 | "@crate//:prost", 9 | "@crate//:prost-reflect", 10 | ], 11 | ) 12 | 13 | rust_library_group( 14 | name = "tonic_runtime", 15 | deps = [ 16 | ":prost_runtime", 17 | "@crate//:futures-core", 18 | "@crate//:tonic", 19 | ], 20 | ) 21 | 22 | rust_prost_toolchain( 23 | name = "prost_toolchain_impl", 24 | prost_opts = [ 25 | "type_attribute=.=#[derive(prost_reflect::ReflectMessage)]", 26 | """type_attribute=.=#[prost_reflect(descriptor_pool = "prost_reflect::DescriptorPool::global()"\\, message_name = "{MESSAGE}")]""", 27 | ], 28 | prost_plugin = "@crate//:protoc-gen-prost__protoc-gen-prost", 29 | prost_plugin_flag = "--plugin=protoc-gen-prost=%s", 30 | prost_runtime = ":prost_runtime", 31 
| prost_types = "@crate//:prost-types", 32 | tonic_plugin = "@crate//:protoc-gen-tonic__protoc-gen-tonic", 33 | tonic_runtime = ":tonic_runtime", 34 | ) 35 | 36 | toolchain( 37 | name = "prost_toolchain", 38 | toolchain = "prost_toolchain_impl", 39 | toolchain_type = "@rules_rust_prost//:toolchain_type", 40 | ) 41 | 42 | proto_toolchain( 43 | name = "nixprotoc_toolchain_impl", 44 | proto_compiler = "@protobuf//:protoc", 45 | ) 46 | 47 | toolchain( 48 | name = "nixprotoc_toolchain", 49 | # Bazel does not follow this attribute during analysis, so the referenced repo 50 | # will only be fetched if this toolchain is selected. 51 | toolchain = ":nixprotoc_toolchain_impl", 52 | toolchain_type = "@rules_proto//proto:toolchain_type", 53 | ) 54 | -------------------------------------------------------------------------------- /blade/bep/protos/package_load_metrics.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2020 The Bazel Authors. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | syntax = "proto2"; 15 | 16 | package devtools.build.lib.packages.metrics; 17 | 18 | import "google/protobuf/duration.proto"; 19 | 20 | option java_package = "com.google.devtools.build.lib.packages.metrics"; 21 | option java_multiple_files = true; 22 | 23 | // Message used to concisely report all package metrics. 
24 | message PackageLoadMetrics { 25 | // Name of the package. 26 | optional string name = 1; 27 | 28 | // Wall-time duration it took to construct the package. 29 | optional google.protobuf.Duration load_duration = 2; 30 | 31 | // Number of targets created in the package. 32 | optional uint64 num_targets = 3; 33 | 34 | // Number of Starlark computation steps required to create the package. 35 | optional uint64 computation_steps = 4; 36 | 37 | // Number of transitive Starlark load()s required to create the package. 38 | optional uint64 num_transitive_loads = 5; 39 | 40 | // Numeric value given to the memory and general accounting costs associated 41 | // with a loaded package. Values are an approximate but reasonable proxy for 42 | // the real storage costs of a package. 43 | optional uint64 package_overhead = 6; 44 | } -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'CI' 3 | 4 | "on": push 5 | 6 | permissions: 7 | actions: write 8 | contents: write 9 | id-token: write 10 | 11 | 12 | concurrency: 13 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | ci: 18 | runs-on: ["ubuntu-latest"] 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | - uses: nixbuild/nix-quick-install-action@v30 23 | with: 24 | nix_conf: | 25 | keep-env-derivations = true 26 | keep-outputs = true 27 | - name: Restore and save Nix store 28 | uses: nix-community/cache-nix-action@v6 29 | with: 30 | # restore and save a cache using this key 31 | primary-key: nix-${{ runner.os }}-${{ hashFiles('**/*.nix', '**/flake.lock') }} 32 | # if there's no cache hit, restore a cache by this prefix 33 | restore-prefixes-first-match: nix-${{ runner.os }}- 34 | # collect garbage until the Nix store size (in bytes) is at most this 
number 35 | # before trying to save a new cache 36 | # 1G = 1073741824 37 | gc-max-store-size-linux: 1G 38 | # do purge caches 39 | purge: true 40 | # purge all versions of the cache 41 | purge-prefixes: nix-${{ runner.os }}- 42 | # created more than this number of seconds ago 43 | purge-created: 0 44 | # or, last accessed more than this number of seconds ago 45 | # relative to the start of the `Post Restore and save Nix store` phase 46 | purge-last-accessed: 0 47 | # except any version with the key that is the same as the `primary-key` 48 | purge-primary-key: never 49 | - uses: HatsuneMiku3939/direnv-action@v1 50 | - run: bazel test //... -------------------------------------------------------------------------------- /blade/components/dashboard/pass_fail_scatterplot.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use state::{Status, TestHistory}; 3 | 4 | use crate::{ 5 | charts::scatterplot::ScatterPlot, 6 | navigation::open_in_new_tab, 7 | summaryheader::format_time, 8 | }; 9 | 10 | #[allow(non_snake_case)] 11 | #[component] 12 | pub fn PassFailScatterPlot(history: TestHistory) -> impl IntoView { 13 | let on_point_click = |point: state::TestHistoryPoint| { 14 | let link = format!("/invocation/{}", point.invocation_id); 15 | open_in_new_tab(&link); 16 | }; 17 | 18 | view! 
{ 19 | 1.0, 26 | Status::Fail => 0.0, 27 | _ => 0.5, 28 | } 29 | _x_label_accessor=|point: &state::TestHistoryPoint| { format_time(&point.start) } 30 | point_color_accessor=|point| match point.test.status { 31 | Status::Success => "#48bb78".to_string(), 32 | Status::Fail => "#f56565".to_string(), 33 | _ => "#a0aec0".to_string(), 34 | } 35 | tooltip_content_accessor=|point| { 36 | format!( 37 | "Invocation: {}\nStatus: {}\nDate: {}", 38 | point.invocation_id.chars().take(8).collect::(), 39 | point.test.status, 40 | format_time(&point.start), 41 | ) 42 | } 43 | on_point_click=on_point_click 44 | x_axis_label="Time" 45 | y_axis_label="Status (0=Fail, 1=Success)" 46 | /> 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /blade/db/sqlite/migrations/2023-11-29-101344_init/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE Invocations ( 2 | id TEXT NOT NULL PRIMARY KEY, 3 | status TEXT NOT NULL, 4 | start TEXT NOT NULL, 5 | end TEXT, 6 | output TEXT NOT NULL, 7 | command TEXT NOT NULL, 8 | pattern TEXT 9 | ); 10 | 11 | CREATE TABLE Targets ( 12 | id TEXT NOT NULL PRIMARY KEY, 13 | invocation_id TEXT NOT NULL, 14 | name TEXT NOT NULL, 15 | status TEXT NOT NULL, 16 | kind TEXT NOT NULL, 17 | start TEXT NOT NULL, 18 | end TEXT, 19 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 20 | ON DELETE CASCADE 21 | ); 22 | CREATE INDEX Targets_Inv_ID ON Targets ( invocation_id ); 23 | 24 | CREATE TABLE Tests ( 25 | id TEXT NOT NULL PRIMARY KEY, 26 | invocation_id TEXT NOT NULL, 27 | name TEXT NOT NULL, 28 | status TEXT NOT NULL, 29 | duration_s Double, 30 | end TEXT NOT NULL, 31 | num_runs INTEGER, 32 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 33 | ON DELETE CASCADE 34 | ); 35 | CREATE INDEX Tests_Inv_ID ON Tests ( invocation_id ); 36 | 37 | CREATE TABLE TestRuns ( 38 | id TEXT NOT NULL PRIMARY KEY, 39 | invocation_id TEXT NOT NULL, 40 | test_id TEXT NOT NULL, 
41 | run INTEGER NOT NULL, 42 | shard INTEGER NOT NULL, 43 | attempt INTEGER NOT NULL, 44 | status TEXT NOT NULL, 45 | details TEXT NOT NULL, 46 | duration_s Double NOT NULL, 47 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 48 | ON DELETE CASCADE, 49 | FOREIGN KEY(test_id) REFERENCES Tests(id) 50 | ON DELETE CASCADE 51 | ); 52 | 53 | CREATE TABLE TestArtifacts ( 54 | id TEXT NOT NULL PRIMARY KEY, 55 | invocation_id TEXT NOT NULL, 56 | test_run_id TEXT NOT NULL, 57 | name TEXT NOT NULL, 58 | uri TEXT NOT NULL, 59 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 60 | ON DELETE CASCADE, 61 | FOREIGN KEY(test_run_id) REFERENCES TestRuns(id) 62 | ON DELETE CASCADE 63 | ); 64 | -------------------------------------------------------------------------------- /MODULE.bazel: -------------------------------------------------------------------------------- 1 | """Bzlmod Configuration for Blade""" 2 | 3 | module( 4 | name = "blade", 5 | repo_name = "blade", 6 | ) 7 | 8 | bazel_dep(name = "platforms", version = "1.0.0") 9 | bazel_dep(name = "rules_proto", version = "7.1.0") 10 | bazel_dep(name = "rules_pkg", version = "1.1.0") 11 | bazel_dep(name = "aspect_bazel_lib", version = "2.21.1") 12 | bazel_dep(name = "aspect_rules_js", version = "2.6.0") 13 | bazel_dep(name = "rules_cc", version = "0.2.8") 14 | bazel_dep(name = "rules_java", version = "8.15.2") 15 | 16 | # 17 | # rules_rust setup 18 | # 19 | bazel_dep(name = "rules_rust", version = "0.65.0") 20 | single_version_override( 21 | module_name = "rules_rust", 22 | patch_strip = 1, 23 | # https://github.com/bazelbuild/rules_rust/issues/3429 24 | patches = ["//third_party/rust/patches/rules_rust:0001-Revert-Update-runfiles-to-work-with-directory-embedd.patch"], 25 | ) 26 | 27 | bazel_dep(name = "rules_rust_prost", version = "0.65.0") 28 | bazel_dep(name = "rules_rust_wasm_bindgen", version = "0.65.0") 29 | 30 | register_toolchains("@rules_rust//rust/private/dummy_cc_toolchain:dummy_cc_wasm32_toolchain") 31 | # 
Third party crates setup is still in the WORKSPACE.bazel file... 32 | 33 | # 34 | # rules_oci setup 35 | # 36 | bazel_dep(name = "rules_oci", version = "2.2.6") 37 | # For testing, we also recommend https://registry.bazel.build/modules/container_structure_test 38 | 39 | oci = use_extension("@rules_oci//oci:extensions.bzl", "oci") 40 | 41 | # Declare external images you need to pull, for example: 42 | oci.pull( 43 | name = "distroless_base", 44 | digest = "sha256:27769871031f67460f1545a52dfacead6d18a9f197db77110cfc649ca2a91f44", 45 | image = "gcr.io/distroless/base", 46 | platforms = ["linux/amd64"], 47 | ) 48 | 49 | # For each oci.pull call, repeat the "name" here to expose them as dependencies. 50 | use_repo(oci, "distroless_base", "distroless_base_linux_amd64") 51 | 52 | # 53 | # Protobuf 54 | # 55 | register_toolchains("@@//prost:nixprotoc_toolchain") 56 | -------------------------------------------------------------------------------- /blade/bep/protos/option_filters.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2017 The Bazel Authors. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | syntax = "proto3"; 15 | 16 | package options; 17 | 18 | // option java_api_version = 2; 19 | option java_package = "com.google.devtools.common.options.proto"; 20 | 21 | // IMPORTANT NOTE: These two enums must be kept in sync with their Java 22 | // equivalents in src/main/java/com/google/devtools/common/options. 23 | // Changing this proto has specific compatibility requirements, please see the 24 | // Java documentation for details. 25 | 26 | // Docs in java enum. 27 | enum OptionEffectTag { 28 | // This option's effect or intent is unknown. 29 | UNKNOWN = 0; 30 | 31 | // This flag has literally no effect. 32 | NO_OP = 1; 33 | 34 | LOSES_INCREMENTAL_STATE = 2; 35 | CHANGES_INPUTS = 3; 36 | AFFECTS_OUTPUTS = 4; 37 | BUILD_FILE_SEMANTICS = 5; 38 | BAZEL_INTERNAL_CONFIGURATION = 6; 39 | LOADING_AND_ANALYSIS = 7; 40 | EXECUTION = 8; 41 | HOST_MACHINE_RESOURCE_OPTIMIZATIONS = 9; 42 | EAGERNESS_TO_EXIT = 10; 43 | BAZEL_MONITORING = 11; 44 | TERMINAL_OUTPUT = 12; 45 | ACTION_COMMAND_LINES = 13; 46 | TEST_RUNNER = 14; 47 | } 48 | 49 | // Docs in java enum. 
50 | enum OptionMetadataTag { 51 | EXPERIMENTAL = 0; 52 | INCOMPATIBLE_CHANGE = 1; 53 | DEPRECATED = 2; 54 | HIDDEN = 3; 55 | INTERNAL = 4; 56 | reserved "TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES"; 57 | reserved 5; 58 | reserved 6; 59 | } -------------------------------------------------------------------------------- /blade/routes/artifact.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Cursor, prelude::Read}; 2 | 3 | use components::shellout::ShellOut; 4 | use leptos::prelude::*; 5 | use leptos_router::{hooks::use_query, params::Params}; 6 | 7 | #[derive(PartialEq, Params, Debug, Clone)] 8 | struct ArtifactParams { 9 | uri: Option, 10 | zip: Option, 11 | } 12 | 13 | fn stringify(e: impl std::fmt::Debug) -> String { format!("{e:#?}") } 14 | 15 | #[component] 16 | pub fn Artifact() -> impl IntoView { 17 | let params = use_query::(); 18 | let artifact = LocalResource::new(move || async move { 19 | let Ok(ArtifactParams { uri, zip }) = params.get() else { 20 | return Err("error parsing query string".into()); 21 | }; 22 | let Some(uri) = uri else { 23 | return Err("empty uri".into()); 24 | }; 25 | let bytes = crate::test::get_artifact(uri).await.map_err(stringify)?; 26 | if let Some(zip) = zip { 27 | let cur = Cursor::new(bytes); 28 | let mut arc = zip::ZipArchive::new(cur).map_err(stringify)?; 29 | let mut file = arc.by_name(&zip).map_err(stringify)?; 30 | let mut out = "".to_string(); 31 | file.read_to_string(&mut out).map_err(stringify)?; 32 | return Ok::(out); 33 | } 34 | Ok(String::from_utf8_lossy(&bytes).to_string()) 35 | }); 36 | view! { 37 |
    38 | Loading...
    } 40 | }> 41 | {move || Suspend::new(async move { 42 | let t: String = match artifact.await { 43 | Ok(t) => t, 44 | Err(t) => t, 45 | }; 46 | view! { } 47 | })} 48 | 49 | 50 | 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /blade/shared/lib.rs: -------------------------------------------------------------------------------- 1 | // Shared types and functions used across multiple crates 2 | #[cfg(feature = "ssr")] 3 | use std::sync::Arc; 4 | 5 | use leptos::{prelude::*, server_fn::ServerFnError}; 6 | #[cfg(feature = "ssr")] 7 | use state::Global; 8 | 9 | #[server] 10 | pub async fn get_artifact(uri: String) -> Result, ServerFnError> { 11 | let global: Arc = use_context::>().unwrap(); 12 | let parsed = url::Url::parse(&uri) 13 | .map_err(|e| ServerFnError::::ServerError(format!("{e:#?}")))?; 14 | match parsed.scheme() { 15 | "file" => { 16 | if !global.allow_local { 17 | return Err(ServerFnError::ServerError("not implemented".to_string())); 18 | } 19 | let path = parsed 20 | .to_file_path() 21 | .map_err(|e| ServerFnError::::ServerError(format!("{e:#?}")))?; 22 | std::fs::read(path).map_err(|_| ServerFnError::::ServerError("bad path".into())) 23 | }, 24 | "bytestream" | "http" | "https" => global 25 | .bytestream_client 26 | .download_file(&uri) 27 | .await 28 | .map_err(|e| ServerFnError::ServerError(format!("failed to get artifact: {e}"))), 29 | _ => Err(ServerFnError::ServerError("not implemented".to_string())), 30 | } 31 | } 32 | 33 | #[server] 34 | pub async fn search_test_names( 35 | pattern: String, 36 | limit: Option, 37 | ) -> Result, ServerFnError> { 38 | let global: Arc = use_context::>().unwrap(); 39 | let mut db = global 40 | .db_manager 41 | .get() 42 | .map_err(|e| ServerFnError::::ServerError(format!("failed to get db: {e}")))?; 43 | 44 | let search_limit = limit.unwrap_or(10).min(50); // Cap at 50 results 45 | db.search_test_names(&pattern, search_limit).map_err(|e| { 46 | 
ServerFnError::::ServerError(format!("failed to search test names: {e}")) 47 | }) 48 | } 49 | -------------------------------------------------------------------------------- /blade/bep/protos/action_cache.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2017 The Bazel Authors. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | syntax = "proto3"; 16 | 17 | package blaze; 18 | 19 | option java_package = "com.google.devtools.build.lib.actions.cache"; 20 | option java_outer_classname = "Protos"; 21 | 22 | // Information about the action cache behavior during a single build. 23 | message ActionCacheStatistics { 24 | // Size of the action cache in bytes. 25 | // 26 | // This is computed by the code that persists the action cache to disk and 27 | // represents the size of the written files, which has no direct relation to 28 | // the number of entries in the cache. 29 | uint64 size_in_bytes = 1; 30 | 31 | // Time it took to save the action cache to disk. 32 | uint64 save_time_in_ms = 2; 33 | 34 | // Reasons for not finding an action in the cache. 35 | enum MissReason { 36 | DIFFERENT_ACTION_KEY = 0; 37 | DIFFERENT_DEPS = 1; 38 | DIFFERENT_ENVIRONMENT = 2; 39 | DIFFERENT_FILES = 3; 40 | CORRUPTED_CACHE_ENTRY = 4; 41 | NOT_CACHED = 5; 42 | UNCONDITIONAL_EXECUTION = 6; 43 | } 44 | 45 | // Detailed information for a particular miss reason. 
46 | message MissDetail { 47 | MissReason reason = 1; 48 | int32 count = 2; 49 | } 50 | 51 | // Cache counters. 52 | int32 hits = 3; 53 | int32 misses = 4; 54 | 55 | // Breakdown of the cache misses based on the reasons behind them. 56 | repeated MissDetail miss_details = 5; 57 | 58 | // NEXT TAG: 6 59 | } -------------------------------------------------------------------------------- /blade/components/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library") 2 | 3 | rust_library( 4 | name = "components", 5 | srcs = glob(["**/*.rs"]), 6 | crate_features = select({ 7 | "@platforms//cpu:wasm32": ["hydrate"], 8 | "//conditions:default": ["ssr"], 9 | }), 10 | visibility = ["//visibility:public"], 11 | deps = select({ 12 | "@platforms//cpu:wasm32": [ 13 | "//blade/darkmode", 14 | "//blade/shared", 15 | "//blade/state", 16 | "//blade/tailwindmerge", 17 | "//blade/trace_event_parser", 18 | "@wasm_crate//:ansi-to-html", 19 | "@wasm_crate//:anyhow", 20 | "@wasm_crate//:humantime", 21 | "@wasm_crate//:junit-parser", 22 | "@wasm_crate//:leptos", 23 | "@wasm_crate//:leptos_dom", 24 | "@wasm_crate//:leptos_router", 25 | "@wasm_crate//:serde", 26 | "@wasm_crate//:serde_json", 27 | "@wasm_crate//:time", 28 | "@wasm_crate//:tracing", 29 | "@wasm_crate//:url", 30 | "@wasm_crate//:url-escape", 31 | "@wasm_crate//:wasm-bindgen", 32 | "@wasm_crate//:web-sys", 33 | ], 34 | "//conditions:default": [ 35 | "//blade/darkmode", 36 | "//blade/shared", 37 | "//blade/state", 38 | "//blade/tailwindmerge", 39 | "//blade/trace_event_parser", 40 | "@crate//:ansi-to-html", 41 | "@crate//:anyhow", 42 | "@crate//:humantime", 43 | "@crate//:junit-parser", 44 | "@crate//:leptos", 45 | "@crate//:leptos_dom", 46 | "@crate//:leptos_router", 47 | "@crate//:serde", 48 | "@crate//:serde_json", 49 | "@crate//:time", 50 | "@crate//:tracing", 51 | "@crate//:url", 52 | "@crate//:url-escape", 53 | "@crate//:wasm-bindgen", 
54 | "@crate//:web-sys", 55 | ], 56 | }), 57 | ) 58 | -------------------------------------------------------------------------------- /blade/db/manager.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use diesel::r2d2::{ConnectionManager, Pool}; 3 | use diesel_tracing::{pg::InstrumentedPgConnection, sqlite::InstrumentedSqliteConnection}; 4 | 5 | pub struct SqliteManager { 6 | pool: Pool>, 7 | } 8 | 9 | impl SqliteManager { 10 | #[allow(clippy::new_ret_no_self)] 11 | pub fn new(uri: &str) -> anyhow::Result> { 12 | crate::sqlite::init_db(uri)?; 13 | let manager = ConnectionManager::::new(uri); 14 | let pool = Pool::builder() 15 | .test_on_check_out(true) 16 | .build(manager) 17 | .context("failed to build db connection pool")?; 18 | Ok(Box::new(Self { pool })) 19 | } 20 | } 21 | 22 | impl state::DBManager for SqliteManager { 23 | fn get(&self) -> anyhow::Result> { 24 | let conn = self 25 | .pool 26 | .get() 27 | .context("failed to get connection from pool")?; 28 | Ok(Box::new(crate::sqlite::Sqlite::new(conn)?)) 29 | } 30 | } 31 | 32 | pub struct PostgresManager { 33 | pool: Pool>, 34 | } 35 | 36 | impl PostgresManager { 37 | #[allow(clippy::new_ret_no_self)] 38 | pub fn new(uri: &str) -> anyhow::Result> { 39 | crate::postgres::init_db(uri)?; 40 | let manager = ConnectionManager::::new(uri); 41 | let pool = Pool::builder() 42 | .test_on_check_out(true) 43 | .build(manager) 44 | .context("failed to build db connection pool")?; 45 | Ok(Box::new(Self { pool })) 46 | } 47 | } 48 | 49 | impl state::DBManager for PostgresManager { 50 | fn get(&self) -> anyhow::Result> { 51 | let conn = self 52 | .pool 53 | .get() 54 | .context("failed to get connection from pool")?; 55 | Ok(Box::new(crate::postgres::Postgres { conn })) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /blade/db/postgres/migrations/2023-11-29-101344_init/up.sql: 
-------------------------------------------------------------------------------- 1 | CREATE TABLE Invocations ( 2 | id TEXT NOT NULL PRIMARY KEY, 3 | status TEXT NOT NULL, 4 | start TIMESTAMP WITH TIME ZONE NOT NULL, 5 | "end" TIMESTAMP WITH TIME ZONE, 6 | output TEXT NOT NULL, 7 | command TEXT NOT NULL, 8 | pattern TEXT 9 | ); 10 | 11 | CREATE TABLE Targets ( 12 | id TEXT NOT NULL PRIMARY KEY, 13 | invocation_id TEXT NOT NULL, 14 | name TEXT NOT NULL, 15 | status TEXT NOT NULL, 16 | kind TEXT NOT NULL, 17 | start TIMESTAMP WITH TIME ZONE NOT NULL, 18 | "end" TIMESTAMP WITH TIME ZONE, 19 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 20 | ON DELETE CASCADE 21 | ); 22 | CREATE INDEX Targets_Inv_ID ON Targets ( invocation_id ); 23 | 24 | CREATE TABLE Tests ( 25 | id TEXT NOT NULL PRIMARY KEY, 26 | invocation_id TEXT NOT NULL, 27 | name TEXT NOT NULL, 28 | status TEXT NOT NULL, 29 | duration_s double precision, 30 | "end" TIMESTAMP WITH TIME ZONE NOT NULL, 31 | num_runs INTEGER, 32 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 33 | ON DELETE CASCADE 34 | ); 35 | CREATE INDEX Tests_Inv_ID ON Tests ( invocation_id ); 36 | 37 | CREATE TABLE TestRuns ( 38 | id TEXT NOT NULL PRIMARY KEY, 39 | invocation_id TEXT NOT NULL, 40 | test_id TEXT NOT NULL, 41 | run INTEGER NOT NULL, 42 | shard INTEGER NOT NULL, 43 | attempt INTEGER NOT NULL, 44 | status TEXT NOT NULL, 45 | details TEXT NOT NULL, 46 | duration_s double precision NOT NULL, 47 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 48 | ON DELETE CASCADE, 49 | FOREIGN KEY(test_id) REFERENCES Tests(id) 50 | ON DELETE CASCADE 51 | ); 52 | 53 | CREATE TABLE TestArtifacts ( 54 | id TEXT NOT NULL PRIMARY KEY, 55 | invocation_id TEXT NOT NULL, 56 | test_run_id TEXT NOT NULL, 57 | name TEXT NOT NULL, 58 | uri TEXT NOT NULL, 59 | FOREIGN KEY(invocation_id) REFERENCES Invocations(id) 60 | ON DELETE CASCADE, 61 | FOREIGN KEY(test_run_id) REFERENCES TestRuns(id) 62 | ON DELETE CASCADE 63 | ); 64 | 
-------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Release' 3 | 4 | "on": workflow_dispatch 5 | 6 | permissions: 7 | contents: write 8 | id-token: write 9 | packages: write 10 | 11 | 12 | concurrency: 13 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | release: 18 | runs-on: ["ubuntu-latest"] 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | - uses: nixbuild/nix-quick-install-action@v30 23 | with: 24 | nix_conf: | 25 | keep-env-derivations = true 26 | keep-outputs = true 27 | - name: Restore and save Nix store 28 | uses: nix-community/cache-nix-action@v6 29 | with: 30 | # restore and save a cache using this key 31 | primary-key: nix-${{ runner.os }}-${{ hashFiles('**/*.nix', '**/flake.lock') }} 32 | # if there's no cache hit, restore a cache by this prefix 33 | restore-prefixes-first-match: nix-${{ runner.os }}- 34 | # collect garbage until the Nix store size (in bytes) is at most this number 35 | # before trying to save a new cache 36 | # 1G = 1073741824 37 | gc-max-store-size-linux: 1G 38 | # do purge caches 39 | purge: true 40 | # purge all versions of the cache 41 | purge-prefixes: nix-${{ runner.os }}- 42 | # created more than this number of seconds ago 43 | purge-created: 0 44 | # or, last accessed more than this number of seconds ago 45 | # relative to the start of the `Post Restore and save Nix store` phase 46 | purge-last-accessed: 0 47 | # except any version with the key that is the same as the `primary-key` 48 | purge-primary-key: never 49 | - uses: HatsuneMiku3939/direnv-action@v1 50 | - name: Log in to registry 51 | run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin 52 | - run: bazel run --config=release -c opt 
--embed_label="$(git describe --tags HEAD)" //blade:blade_push 53 | -------------------------------------------------------------------------------- /blade/static/test.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 7 | 10 | 11 | 13 | 15 | 17 | 19 | 27 | 28 | -------------------------------------------------------------------------------- /blade/bep/options.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use build_event_stream_proto::build_event_stream; 3 | use state::DBManager; 4 | 5 | pub(crate) struct Handler {} 6 | 7 | impl crate::EventHandler for Handler { 8 | fn handle_event( 9 | &self, 10 | db_mgr: &dyn DBManager, 11 | invocation_id: &str, 12 | event: &build_event_stream::BuildEvent, 13 | ) -> anyhow::Result<()> { 14 | match &event.payload { 15 | Some(build_event_stream::build_event::Payload::UnstructuredCommandLine(opts)) => { 16 | let mut db = db_mgr.get().context("failed to get db handle")?; 17 | let o = state::BuildOptions { 18 | unstructured: opts.args.clone(), 19 | ..Default::default() 20 | }; 21 | db.insert_options(invocation_id, &o) 22 | .context("failed to insert unstructured command line")?; 23 | }, 24 | Some(build_event_stream::build_event::Payload::OptionsParsed(opts)) => { 25 | let mut db = db_mgr.get().context("failed to get db handle")?; 26 | let o = state::BuildOptions { 27 | startup: opts.startup_options.clone(), 28 | explicit_startup: opts.explicit_startup_options.clone(), 29 | cmd_line: opts.cmd_line.clone(), 30 | explicit_cmd_line: opts.explicit_cmd_line.clone(), 31 | ..Default::default() 32 | }; 33 | db.insert_options(invocation_id, &o) 34 | .context("failed to insert parsed options")?; 35 | }, 36 | Some(build_event_stream::build_event::Payload::BuildMetadata(md)) => { 37 | let mut db = db_mgr.get().context("failed to get db handle")?; 38 | let o = state::BuildOptions { 39 | build_metadata: md.metadata.clone(), 40 | 
..Default::default() 41 | }; 42 | db.insert_options(invocation_id, &o) 43 | .context("failed to insert parsed options")?; 44 | }, 45 | _ => {}, 46 | } 47 | Ok(()) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /tailwindcss/tailwindcss.bzl: -------------------------------------------------------------------------------- 1 | """Rules to handle tailwindcss.""" 2 | 3 | SrcsInfo = provider( 4 | doc = "SrcsInfo contains source files for a target", 5 | fields = { 6 | "srcs": "depset of source files", 7 | }, 8 | ) 9 | 10 | def _srcs_aspect_impl(target, ctx): 11 | # Ignore external dependencies. 12 | if target.label.workspace_root: 13 | return [] 14 | srcs = [] 15 | 16 | # Make sure the rule has a srcs attribute. 17 | if hasattr(ctx.rule.attr, "srcs"): 18 | # Iterate through the files that make up the sources and 19 | # print their paths. 20 | for src in ctx.rule.attr.srcs: 21 | srcs.append(src.files) 22 | if hasattr(ctx.rule.attr, "deps"): 23 | for dep in ctx.rule.attr.deps: 24 | if SrcsInfo in dep: 25 | srcs.append(dep[SrcsInfo].srcs) 26 | return [SrcsInfo(srcs = depset(transitive = srcs))] 27 | 28 | _srcs_aspect = aspect( 29 | implementation = _srcs_aspect_impl, 30 | attr_aspects = ["deps"], 31 | ) 32 | 33 | def _impl(ctx): 34 | f = ctx.actions.declare_file("%s.css" % ctx.label.name) 35 | si = ctx.attr.target[SrcsInfo] 36 | 37 | ctx.actions.run( 38 | outputs = [f], 39 | inputs = depset([ctx.file.src, ctx.file._tailwindcss_config], transitive = [si.srcs]), 40 | executable = ctx.executable._tailwindcss, 41 | arguments = [ 42 | "--input", 43 | ctx.file.src.path, 44 | "--output", 45 | f.path, 46 | "-m", 47 | ], 48 | mnemonic = "TailwindCSS", 49 | ) 50 | return [DefaultInfo( 51 | files = depset([f]), 52 | )] 53 | 54 | tailwindcss = rule( 55 | implementation = _impl, 56 | attrs = { 57 | "src": attr.label( 58 | allow_single_file = [".css"], 59 | mandatory = True, 60 | ), 61 | "target": attr.label( 62 | mandatory = True, 63 | 
aspects = [_srcs_aspect], 64 | ), 65 | "_tailwindcss": attr.label( 66 | default = "@tailwindcss//:tailwindcss-cli", 67 | executable = True, 68 | cfg = "exec", 69 | ), 70 | "_tailwindcss_config": attr.label( 71 | default = "//:tailwind.config.js", 72 | allow_single_file = True, 73 | ), 74 | }, 75 | ) 76 | -------------------------------------------------------------------------------- /blade/bep/protos/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust_prost//:defs.bzl", "rust_prost_library") 2 | load("//blade/bep/protos:descriptors.bzl", "transitive_proto_descriptors") 3 | 4 | rust_prost_library( 5 | name = "empty_proto", 6 | proto = "@protobuf//:empty_proto", 7 | visibility = ["//visibility:public"], 8 | ) 9 | 10 | rust_prost_library( 11 | name = "bep_proto", 12 | proto = "@googleapis//google/devtools/build/v1:build_proto", 13 | visibility = ["//visibility:public"], 14 | ) 15 | 16 | transitive_proto_descriptors( 17 | name = "bep_descriptors", 18 | proto = ":build_event_stream_proto", 19 | visibility = ["//visibility:public"], 20 | ) 21 | 22 | transitive_proto_descriptors( 23 | name = "bes_descriptors", 24 | proto = "@googleapis//google/devtools/build/v1:build_proto", 25 | visibility = ["//visibility:public"], 26 | ) 27 | 28 | rust_prost_library( 29 | name = "build_event_stream_rust_proto", 30 | proto = ":build_event_stream_proto", 31 | visibility = ["//visibility:public"], 32 | ) 33 | 34 | proto_library( 35 | name = "action_cache_proto", 36 | srcs = ["action_cache.proto"], 37 | ) 38 | 39 | proto_library( 40 | name = "build_event_stream_proto", 41 | srcs = ["build_event_stream.proto"], 42 | visibility = ["//visibility:public"], 43 | deps = [ 44 | ":action_cache_proto", 45 | ":command_line_proto", 46 | ":failure_details_proto", 47 | ":invocation_policy_proto", 48 | ":package_load_metrics_proto", 49 | "@protobuf//:any_proto", 50 | "@protobuf//:duration_proto", 51 | "@protobuf//:timestamp_proto", 52 | ], 
)

proto_library(
    name = "command_line_proto",
    srcs = ["command_line.proto"],
    deps = [":option_filters_proto"],
)

proto_library(
    name = "failure_details_proto",
    srcs = ["failure_details.proto"],
    deps = ["@protobuf//:descriptor_proto"],
)

proto_library(
    name = "invocation_policy_proto",
    srcs = ["invocation_policy.proto"],
)

proto_library(
    name = "option_filters_proto",
    srcs = ["option_filters.proto"],
)

proto_library(
    name = "package_load_metrics_proto",
    srcs = ["package_load_metrics.proto"],
    deps = ["@protobuf//:duration_proto"],
)
-------------------------------------------------------------------------------- /blade/bep/buildinfo.rs: --------------------------------------------------------------------------------
use anyhow::Context;
use build_event_stream_proto::build_event_stream;
use state::DBManager;

/// BEP event handler that records top-level invocation metadata
/// (start time, command, and target pattern) into the database.
pub(crate) struct Handler {}

impl crate::EventHandler for Handler {
    /// Handles a single `BuildEvent` for `invocation_id`.
    ///
    /// Only two payloads are acted upon; every other payload is ignored:
    /// * `Started` — records the invocation's start time and command.
    /// * `Expanded` — records the target pattern carried on the event id.
    ///
    /// Returns an error if a DB handle cannot be obtained or the
    /// shallow invocation row cannot be updated.
    fn handle_event(
        &self,
        db_mgr: &dyn DBManager,
        invocation_id: &str,
        event: &build_event_stream::BuildEvent,
    ) -> anyhow::Result<()> {
        match &event.payload {
            Some(build_event_stream::build_event::Payload::Started(p)) => {
                let mut db = db_mgr.get().context("failed to get db handle")?;
                // Clone out of the event so the update closure can be 'static.
                let start = p.start_time.clone();
                let command = p.command.clone();
                db.update_shallow_invocation(
                    invocation_id,
                    Box::new(move |i: &mut state::InvocationResults| {
                        // Fall back to "now" when the proto timestamp is
                        // missing or cannot be converted.
                        i.start = start
                            .as_ref()
                            .and_then(|s| prototime::timestamp::from_proto(s).ok())
                            .unwrap_or_else(std::time::SystemTime::now);
                        i.command = command;
                        Ok(())
                    }),
                )
                .context("failed to insert invocation")?;
            },
            Some(build_event_stream::build_event::Payload::Expanded(_)) => {
                let mut db = db_mgr.get().context("failed to get db handle")?;
                // The pattern list lives on the event *id*, not the payload.
                // NOTE(review): dropped a redundant `.to_vec()` here — the
                // value is already an owned Vec, so it only added an extra
                // full clone of every pattern string.
                let pattern = event
                    .id
                    .as_ref()
                    .and_then(|id| {
                        if let Some(build_event_stream::build_event_id::Id::Pattern(id)) = &id.id {
                            return Some(id.pattern.clone());
                        }
                        None
                    })
                    .unwrap_or_default();
                db.update_shallow_invocation(
                    invocation_id,
                    Box::new(move |i: &mut state::InvocationResults| {
                        i.pattern = pattern;
                        Ok(())
                    }),
                )?;
            },
            _ => {},
        }

        Ok(())
    }
}
-------------------------------------------------------------------------------- /blade/routes/app.rs: --------------------------------------------------------------------------------
use components::nav::Nav;
use darkmode::DarkMode;
use leptos::prelude::*;
use leptos_meta::*;
use leptos_router::{components::*, path};

use crate::{
    artifact::Artifact,
    dashboard::Dashboard,
    details::Details,
    empty::Empty,
    invocation::Invocation,
    profile::BazelProfile,
    summary::Summary,
    test::Test,
};

#[component]
pub fn App() -> impl IntoView {
    provide_meta_context();
    let (dark_mode, set_dark_mode) = signal(DarkMode(false));
    provide_context((dark_mode, set_dark_mode));
    Effect::new(move || {
        set_dark_mode.set(DarkMode(darkmode::get()));
    });
    let formatter = |text: String| {
        format!(
            "Blade{}{}",
            if text.is_empty() { "" } else { " - " },
            if text.is_empty() { "" } else { &text },
        )
    };
    view!
{ 34 | 35 | <Stylesheet id="leptos" href="/assets/style.css" /> 36 | <Html class:dark=move || dark_mode.read().0 /> 37 | <Router> 38 | <div 39 | id="root" 40 | class="h-screen w-screen max-w-screen max-h-screen dark:bg-gray-800 dark:placeholder-gray-400 dark:text-white overflow-clip" 41 | class:dark=move || dark_mode.read().0 42 | > 43 | <Nav name="Blade" logo="/assets/logo.svg" /> 44 | <main class="overflow-hidden"> 45 | <Routes fallback=|| "Not Found."> 46 | <ParentRoute path=path!("invocation/:id") view=Invocation> 47 | <Route path=path!("test") view=Test /> 48 | <Route path=path!("details") view=Details /> 49 | <Route path=path!("artifact") view=Artifact /> 50 | <Route path=path!("profile") view=BazelProfile /> 51 | <Route path=path!("*any") view=Summary /> 52 | </ParentRoute> 53 | <Route path=path!("dashboard") view=Dashboard /> 54 | <Route path=path!("/") view=Dashboard /> 55 | <Route path=path!("*any") view=Empty /> 56 | </Routes> 57 | </main> 58 | </div> 59 | </Router> 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BLADE (Build Log Analysis Dashboard Engine) 2 | 3 | ![CI](https://github.com/DolceTriade/blade/actions/workflows/ci.yaml/badge.svg) 4 | 5 | Blade is a Bazel BEP viewer. It's in its early stages, but should be generally functional. 6 | 7 | ![screenshot](img/ss.png) 8 | ![screenshot](img/testhistory.png) 9 | 10 | # Quick Start 11 | 12 | ## Docker + Emphemeral SQLite 13 | 14 | You can run this using Docker: 15 | 16 | ``` 17 | docker run --rm -p 3000:3000 -p 50332:50332 -v $HOME:$HOME ghcr.io/dolcetriade/blade:v0.23 --db_path sqlite:///tmp/blade.db -l 18 | ``` 19 | 20 | Then you can run: 21 | 22 | ``` 23 | bazel test -c opt --bes_backend=grpc://localhost:50332 --bes_results_url="http://localhost:3000/invocation/" //... 
24 | ``` 25 | 26 | If you want the database to be persistent, you can bind mount a location in. You need to bind mount `$HOME` so you can access build logs. If you have a local remote cache, you can use that instead. You can override bytestream locations with alternate locations by passing in `-o bytestream://original.com=http://127.0.0.1:3834`. This flag can be passed in multiple times. If you use a remote cache, you can omit the `-l` flag which prevents reading local files, which is a security risk since this would allow people to read arbitrary files on the host. 27 | 28 | ## PostgreSQL 29 | 30 | Assuming you can access postgresql from the docker container: 31 | 32 | ``` 33 | docker run --rm -p 3000:3000 -p 50332:50332 ghcr.io/dolcetriade/blade:v0.8 --db_path postgres://username:password@hostname/blade 34 | ``` 35 | 36 | Note that the database must exist prior to starting blade. 37 | 38 | # Building 39 | 40 | ## Environment 41 | 42 | Blade depends on bazel and nix to build. Nix is used to manage third party dependencies and bazel is used as the build system. 43 | 44 | First, install Nix: 45 | ``` 46 | curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install 47 | ``` 48 | 49 | Then run `nix develop --impure` in the source dir to set up the dev environment. Alternatively, you can use direnv to automatically load the dev dependencies into the environment: `direnv allow` 50 | 51 | ## Running 52 | 53 | In one terminal, run: 54 | `bazel run //blade -- --db_path sqlite:///tmp/blade.db --allow_local` 55 | 56 | Then, in another, run: 57 | 58 | `bazel test -c opt --bes_backend=grpc://localhost:50332 --bes_results_url="http://localhost:3000/invocation/" //...` 59 | 60 | to test it out. 
61 | -------------------------------------------------------------------------------- /blade/components/accordion.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | 3 | #[component] 4 | pub fn AccordionItem<F, IV>( 5 | header: F, 6 | children: Children, 7 | #[prop(optional, into)] header_class: String, 8 | #[prop(optional, default = false)] hide: bool, 9 | ) -> impl IntoView 10 | where 11 | F: Fn() -> IV, 12 | IV: IntoView, 13 | { 14 | let (hide, set_hide) = signal(hide); 15 | view! { 16 | <button 17 | type="button" 18 | on:click=move |_| set_hide.set(!hide.get()) 19 | class="flex w-full grow items-center justify-between p-5 font-medium rtl:text-right text-gray-500 border first:border-t-0 border-gray-200 first:rounded-t-xl last:rounded-b-xl focus:ring-4 focus:ring-gray-200 hover:bg-gray-100 gap-3 dark:border-gray-600 dark:focus:ring-gray-800 dark:hover:bg-gray-900" 20 | > 21 | <span class=header_class>{header()}</span> 22 | <span> 23 | <svg 24 | data-accordion-icon 25 | class=move || { 26 | format!( 27 | "transition-all w-3 h-3 shrink-0 {}", 28 | if hide.get() { "rotate-180" } else { "" }, 29 | ) 30 | } 31 | 32 | aria-hidden="true" 33 | xmlns="http://www.w3.org/2000/svg" 34 | fill="none" 35 | viewBox="0 0 10 6" 36 | > 37 | <path 38 | stroke="currentColor" 39 | stroke-linecap="round" 40 | stroke-linejoin="round" 41 | stroke-width="2" 42 | d="M9 5 5 1 1 5" 43 | ></path> 44 | </svg> 45 | </span> 46 | </button> 47 | <div class=move || { 48 | format!( 49 | "transition-all ease-in {}", 50 | if hide.get() { "max-h-0 absolute opacity-0 overflow-hidden" } else { "" }, 51 | ) 52 | }> 53 | <div class="p-5 border border-b-0 border-gray-200 dark:border-gray-600"> 54 | {children()} 55 | </div> 56 | </div> 57 | } 58 | } 59 | 60 | #[allow(non_snake_case)] 61 | #[component] 62 | pub fn Accordion(children: Children) -> impl IntoView { 63 | view! 
{ <div class="m-0 p-0">{children()}</div> } 64 | } 65 | -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/correctness.bazelrc: -------------------------------------------------------------------------------- 1 | # Warn if a test's timeout is significantly longer than the test's actual execution time. 2 | # Bazel's default for test_timeout is medium (5 min), but most tests should instead be short (1 min). 3 | # While a test's timeout should be set such that it is not flaky, a test that has a highly 4 | # over-generous timeout can hide real problems that crop up unexpectedly. 5 | # For instance, a test that normally executes in a minute or two should not have a timeout of 6 | # ETERNAL or LONG as these are much, much too generous. 7 | # Docs: https://bazel.build/docs/user-manual#test-verbose-timeout-warnings 8 | test --test_verbose_timeout_warnings 9 | 10 | # Allow exclusive tests to run in the sandbox. Fixes a bug where Bazel doesn't enable sandboxing for 11 | # tests with `tags=["exclusive"]`. 12 | # Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_exclusive_test_sandboxed 13 | test --incompatible_exclusive_test_sandboxed 14 | 15 | # Propagate tags from a target declaration to the actions' execution requirements. 16 | # Ensures that tags applied in your BUILD file, like `tags=["no-remote"]` 17 | # get propagated to actions created by the rule. 18 | # Without this option, you rely on rules authors to manually check the tags you passed 19 | # and apply relevant ones to the actions they create. 20 | # See https://github.com/bazelbuild/bazel/issues/8830 for details. 
21 | # Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_allow_tags_propagation 22 | build --incompatible_allow_tags_propagation 23 | fetch --incompatible_allow_tags_propagation 24 | query --incompatible_allow_tags_propagation 25 | 26 | # Do not automatically create `__init__.py` files in the runfiles of Python targets. Fixes the wrong 27 | # default that comes from Google's internal monorepo by using `__init__.py` to delimit a Python 28 | # package. Precisely, when a `py_binary` or `py_test` target has `legacy_create_init` set to `auto (the 29 | # default), it is treated as false if and only if this flag is set. See 30 | # https://github.com/bazelbuild/bazel/issues/10076. 31 | # Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_default_to_explicit_init_py 32 | build --incompatible_default_to_explicit_init_py 33 | 34 | # TODO(hmodi): Clamp down on network access for tests 35 | # TODO(hmodi): Clamp down on environment variable pollution: # Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_strict_action_env 36 | build --incompatible_strict_action_env 37 | -------------------------------------------------------------------------------- /blade/components/dashboard/duration_chart.rs: -------------------------------------------------------------------------------- 1 | use leptos::prelude::*; 2 | use state::TestHistory; 3 | 4 | use crate::{ 5 | charts::linechart::LineChart, 6 | navigation::open_in_new_tab, 7 | summaryheader::format_time, 8 | }; 9 | 10 | pub fn format_unix(t: f64) -> String { 11 | let d = std::time::Duration::from_secs_f64(t); 12 | let ss = std::time::SystemTime::UNIX_EPOCH 13 | .checked_add(d) 14 | .unwrap_or_else(std::time::SystemTime::now); 15 | format_time(&ss) 16 | } 17 | 18 | #[allow(non_snake_case)] 19 | #[component] 20 | pub fn DurationChart(history: TestHistory) -> impl IntoView { 21 | let on_point_click = |point: state::TestHistoryPoint| { 22 | let link = 
format!("/invocation/{}", point.invocation_id); 23 | open_in_new_tab(&link); 24 | }; 25 | 26 | // Sort data so that successful tests are rendered first and failed tests last 27 | // This ensures failed points (red) appear on top of successful points (green) 28 | // when they overlap 29 | let mut sorted_history = history.history; 30 | sorted_history.sort_by(|a, b| { 31 | // Put failures last (so they render on top) 32 | // Success = false, Failure = true, so failures come after successes 33 | matches!(a.test.status, state::Status::Fail) 34 | .cmp(&matches!(b.test.status, state::Status::Fail)) 35 | }); 36 | 37 | view! { 38 | <LineChart 39 | data=sorted_history 40 | x_accessor=|point| { 41 | point.start.duration_since(std::time::SystemTime::UNIX_EPOCH).unwrap().as_secs_f64() 42 | } 43 | y_accessor=|point| point.test.duration.as_secs_f64() 44 | line_color="#4299e1" 45 | point_color_accessor=|p| { 46 | (match p.test.status { 47 | state::Status::Success => "#48bb78", 48 | _ => "#f56565", 49 | }) 50 | .to_string() 51 | } 52 | tooltip_content_accessor=|point| { 53 | format!( 54 | "Invocation: {}\nDuration: {}\nDate: {}", 55 | point.invocation_id.chars().take(8).collect::<String>(), 56 | humantime::format_duration(point.test.duration), 57 | format_time(&point.start), 58 | ) 59 | } 60 | x_tick_formatter=Box::new(format_unix) 61 | on_point_click=on_point_click 62 | x_axis_label="Time" 63 | y_axis_label="Duration (s)" 64 | x_axis_label_rotation=10.0 65 | show_line=false 66 | /> 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /blade/routes/BUILD.bazel: -------------------------------------------------------------------------------- 1 | load("@rules_rust//rust:defs.bzl", "rust_library") 2 | 3 | rust_library( 4 | name = "routes", 5 | srcs = glob(["**/*.rs"]), 6 | crate_features = select({ 7 | "@platforms//cpu:wasm32": ["hydrate"], 8 | "//conditions:default": ["ssr"], 9 | }), 10 | rustc_env = { 11 | "SERVER_FN_OVERRIDE_KEY": 
"bazel", 12 | }, 13 | visibility = ["//visibility:public"], 14 | deps = select({ 15 | "@platforms//cpu:wasm32": [ 16 | "//blade/components", 17 | "//blade/darkmode", 18 | "//blade/shared", 19 | "//blade/state", 20 | "//blade/tailwindmerge", 21 | "//blade/trace_event_parser", 22 | "@wasm_crate//:anyhow", 23 | "@wasm_crate//:flate2", 24 | "@wasm_crate//:futures", 25 | "@wasm_crate//:gloo-file", 26 | "@wasm_crate//:gloo-net", 27 | "@wasm_crate//:humantime", 28 | "@wasm_crate//:junit-parser", 29 | "@wasm_crate//:leptos", 30 | "@wasm_crate//:leptos_dom", 31 | "@wasm_crate//:leptos_meta", 32 | "@wasm_crate//:leptos_router", 33 | "@wasm_crate//:log", 34 | "@wasm_crate//:serde", 35 | "@wasm_crate//:time", 36 | "@wasm_crate//:tracing", 37 | "@wasm_crate//:url", 38 | "@wasm_crate//:url-escape", 39 | "@wasm_crate//:wasm-bindgen", 40 | "@wasm_crate//:web-sys", 41 | "@wasm_crate//:zip", 42 | ], 43 | "//conditions:default": [ 44 | "//blade/bep", 45 | "//blade/bytestream", 46 | "//blade/components", 47 | "//blade/darkmode", 48 | "//blade/db", 49 | "//blade/shared", 50 | "//blade/state", 51 | "//blade/tailwindmerge", 52 | "//blade/trace_event_parser", 53 | "@crate//:anyhow", 54 | "@crate//:flate2", 55 | "@crate//:futures", 56 | "@crate//:humantime", 57 | "@crate//:junit-parser", 58 | "@crate//:leptos", 59 | "@crate//:leptos_dom", 60 | "@crate//:leptos_meta", 61 | "@crate//:leptos_router", 62 | "@crate//:log", 63 | "@crate//:serde", 64 | "@crate//:time", 65 | "@crate//:tokio", 66 | "@crate//:tokio-stream", 67 | "@crate//:tracing", 68 | "@crate//:url", 69 | "@crate//:url-escape", 70 | "@crate//:wasm-bindgen", 71 | "@crate//:web-sys", 72 | "@crate//:zip", 73 | ], 74 | }), 75 | ) 76 | -------------------------------------------------------------------------------- /blade/db/postgres/schema.rs: -------------------------------------------------------------------------------- 1 | // @generated automatically by Diesel CLI. 2 | 3 | diesel::table! 
{ 4 | invocations (id) { 5 | id -> Text, 6 | status -> Text, 7 | start -> Timestamptz, 8 | end -> Nullable<Timestamptz>, 9 | command -> Text, 10 | pattern -> Nullable<Text>, 11 | last_heartbeat -> Nullable<Timestamptz>, 12 | profile_uri -> Nullable<Text>, 13 | } 14 | } 15 | 16 | diesel::table! { 17 | options (id) { 18 | id -> Text, 19 | invocation_id -> Text, 20 | kind -> Text, 21 | keyval -> Text, 22 | } 23 | } 24 | 25 | diesel::table! { 26 | targets (id) { 27 | id -> Text, 28 | invocation_id -> Text, 29 | name -> Text, 30 | status -> Text, 31 | kind -> Text, 32 | start -> Timestamptz, 33 | end -> Nullable<Timestamptz>, 34 | } 35 | } 36 | 37 | diesel::table! { 38 | testartifacts (id) { 39 | id -> Text, 40 | invocation_id -> Text, 41 | test_run_id -> Text, 42 | name -> Text, 43 | uri -> Text, 44 | } 45 | } 46 | 47 | diesel::table! { 48 | testruns (id) { 49 | id -> Text, 50 | invocation_id -> Text, 51 | test_id -> Text, 52 | run -> Int4, 53 | shard -> Int4, 54 | attempt -> Int4, 55 | status -> Text, 56 | details -> Text, 57 | duration_s -> Float8, 58 | } 59 | } 60 | 61 | diesel::table! { 62 | tests (id) { 63 | id -> Text, 64 | invocation_id -> Text, 65 | name -> Text, 66 | status -> Text, 67 | duration_s -> Nullable<Float8>, 68 | end -> Timestamptz, 69 | num_runs -> Nullable<Int4>, 70 | } 71 | } 72 | 73 | diesel::table! { 74 | invocationoutput (id) { 75 | id -> Int4, 76 | invocation_id -> Text, 77 | line -> Text, 78 | } 79 | } 80 | 81 | diesel::table! 
{ 82 | unique_test_names (name) { 83 | name -> Text, 84 | } 85 | } 86 | 87 | diesel::joinable!(options -> invocations (invocation_id)); 88 | diesel::joinable!(targets -> invocations (invocation_id)); 89 | diesel::joinable!(testartifacts -> invocations (invocation_id)); 90 | diesel::joinable!(testartifacts -> testruns (test_run_id)); 91 | diesel::joinable!(testruns -> invocations (invocation_id)); 92 | diesel::joinable!(testruns -> tests (test_id)); 93 | diesel::joinable!(tests -> invocations (invocation_id)); 94 | diesel::joinable!(invocationoutput -> invocations (invocation_id)); 95 | 96 | diesel::allow_tables_to_appear_in_same_query!( 97 | invocations, 98 | invocationoutput, 99 | options, 100 | targets, 101 | testartifacts, 102 | testruns, 103 | tests, 104 | unique_test_names, 105 | ); 106 | -------------------------------------------------------------------------------- /blade/db/sqlite/schema.rs: -------------------------------------------------------------------------------- 1 | // @generated automatically by Diesel CLI. 2 | 3 | diesel::table! { 4 | Invocations (id) { 5 | id -> Text, 6 | status -> Text, 7 | start -> TimestamptzSqlite, 8 | end -> Nullable<TimestamptzSqlite>, 9 | command -> Text, 10 | pattern -> Nullable<Text>, 11 | last_heartbeat -> Nullable<TimestamptzSqlite>, 12 | profile_uri -> Nullable<Text>, 13 | } 14 | } 15 | 16 | diesel::table! { 17 | Options (id) { 18 | id -> Text, 19 | invocation_id -> Text, 20 | kind -> Text, 21 | keyval -> Text, 22 | } 23 | } 24 | 25 | diesel::table! { 26 | Targets (id) { 27 | id -> Text, 28 | invocation_id -> Text, 29 | name -> Text, 30 | status -> Text, 31 | kind -> Text, 32 | start -> TimestamptzSqlite, 33 | end -> Nullable<TimestamptzSqlite>, 34 | } 35 | } 36 | 37 | diesel::table! { 38 | TestArtifacts (id) { 39 | id -> Text, 40 | invocation_id -> Text, 41 | test_run_id -> Text, 42 | name -> Text, 43 | uri -> Text, 44 | } 45 | } 46 | 47 | diesel::table! 
{ 48 | TestRuns (id) { 49 | id -> Text, 50 | invocation_id -> Text, 51 | test_id -> Text, 52 | run -> Integer, 53 | shard -> Integer, 54 | attempt -> Integer, 55 | status -> Text, 56 | details -> Text, 57 | duration_s -> Double, 58 | } 59 | } 60 | 61 | diesel::table! { 62 | Tests (id) { 63 | id -> Text, 64 | invocation_id -> Text, 65 | name -> Text, 66 | status -> Text, 67 | duration_s -> Nullable<Double>, 68 | end -> TimestamptzSqlite, 69 | num_runs -> Nullable<Integer>, 70 | } 71 | } 72 | 73 | diesel::table! { 74 | InvocationOutput (id) { 75 | id -> Integer, 76 | invocation_id -> Text, 77 | line -> Text, 78 | } 79 | } 80 | 81 | diesel::table! { 82 | unique_test_names (name) { 83 | name -> Text, 84 | } 85 | } 86 | 87 | diesel::joinable!(Options -> Invocations (invocation_id)); 88 | diesel::joinable!(Targets -> Invocations (invocation_id)); 89 | diesel::joinable!(TestArtifacts -> Invocations (invocation_id)); 90 | diesel::joinable!(TestArtifacts -> TestRuns (test_run_id)); 91 | diesel::joinable!(TestRuns -> Invocations (invocation_id)); 92 | diesel::joinable!(TestRuns -> Tests (test_id)); 93 | diesel::joinable!(Tests -> Invocations (invocation_id)); 94 | diesel::joinable!(InvocationOutput -> Invocations (invocation_id)); 95 | 96 | diesel::allow_tables_to_appear_in_same_query!( 97 | Invocations, 98 | Options, 99 | Targets, 100 | TestArtifacts, 101 | TestRuns, 102 | Tests, 103 | unique_test_names, 104 | ); 105 | -------------------------------------------------------------------------------- /.bazel-lib/bazelrc/performance.bazelrc: -------------------------------------------------------------------------------- 1 | # Merkle tree calculations will be memoized to improve the remote cache hit checking speed. The 2 | # memory foot print of the cache is controlled by `--experimental_remote_merkle_tree_cache_size`. 
3 | # Docs: https://bazel.build/reference/command-line-reference#flag--experimental_remote_merkle_tree_cache 4 | build --experimental_remote_merkle_tree_cache 5 | query --experimental_remote_merkle_tree_cache 6 | 7 | # The number of Merkle trees to memoize to improve the remote cache hit checking speed. Even though 8 | # the cache is automatically pruned according to Java's handling of soft references, out-of-memory 9 | # errors can occur if set too high. If set to 0 the cache size is unlimited. Optimal value varies 10 | # depending on project's size. 11 | # Docs: https://bazel.build/reference/command-line-reference#flag--experimental_remote_merkle_tree_cache_size 12 | build --experimental_remote_merkle_tree_cache_size=1000 13 | query --experimental_remote_merkle_tree_cache_size=1000 14 | 15 | # Speed up all builds by not checking if output files have been modified. Lets you make changes to 16 | # the output tree without triggering a build for local debugging. For example, you can modify 17 | # [rules_js](https://github.com/aspect-build/rules_js) 3rd party npm packages in the output tree 18 | # when local debugging. 19 | # Docs: https://github.com/bazelbuild/bazel/blob/1af61b21df99edc2fc66939cdf14449c2661f873/src/main/java/com/google/devtools/build/lib/pkgcache/PackageOptions.java#L185 20 | build --noexperimental_check_output_files 21 | fetch --noexperimental_check_output_files 22 | query --noexperimental_check_output_files 23 | 24 | # Directories used by sandboxed non-worker execution may be reused to avoid unnecessary setup costs. 25 | # Save time on Sandbox creation and deletion when many of the same kind of action run during the 26 | # build. 
27 | # No longer experimental in Bazel 6: https://github.com/bazelbuild/bazel/commit/c1a95501a5611878e5cc43a3cc531f2b9e47835b 28 | # Docs: https://bazel.build/reference/command-line-reference#flag--reuse_sandbox_directories 29 | build --experimental_reuse_sandbox_directories 30 | 31 | # Some actions are always IO-intensive but require little compute. It's wasteful to put the output 32 | # in the remote cache, it just saturates the network and fills the cache storage causing earlier 33 | # evictions. It's also not worth sending them for remote execution. 34 | # For actions like PackageTar and GoStdLib actions it's usually faster to just re-run the work locally every time. 35 | # You'll have to look at an execution log to figure out what other action mnemonics you care about. 36 | # In some cases you may need to patch rulesets to add a mnemonic to actions that don't have one. 37 | # https://bazel.build/reference/command-line-reference#flag--modify_execution_info 38 | build --modify_execution_info='PackageTar=+no-remote,GoStdlib.*=+no-remote' 39 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "BLADE"; 3 | inputs = { 4 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 5 | flake-utils.url = "github:numtide/flake-utils"; 6 | devenv.url = "github:cachix/devenv/latest"; 7 | fenix.url = "github:nix-community/fenix"; 8 | }; 9 | outputs = { 10 | self, 11 | nixpkgs, 12 | flake-utils, 13 | devenv, 14 | fenix, 15 | ... 
16 | } @ inputs: (flake-utils.lib.eachDefaultSystem 17 | (system: let 18 | pkgs = import nixpkgs { 19 | inherit system; 20 | overlays = [fenix.overlays.default]; 21 | }; 22 | 23 | bazelEnv = import ./nix/bazel/env.nix {inherit pkgs;}; 24 | rust = with pkgs.fenix; 25 | with latest; 26 | combine [ 27 | cargo 28 | clippy 29 | rust-src 30 | rustc 31 | rustfmt 32 | targets.wasm32-unknown-unknown.latest.rust-std 33 | rust-analyzer 34 | ]; 35 | 36 | rustPlatform = pkgs.makeRustPlatform { 37 | cargo = rust; 38 | rustc = rust; 39 | }; 40 | leptosfmt = pkgs.callPackage ./third_party/nix/leptosfmt {inherit rustPlatform;}; 41 | diesel-cli' = pkgs.callPackage ./third_party/nix/diesel_cli { 42 | inherit rustPlatform; 43 | mysqlSupport = false; 44 | }; 45 | jemalloc' = pkgs.callPackage ./third_party/nix/jemalloc {}; 46 | ibazel = pkgs.writeShellScriptBin "ibazel" '' 47 | ${pkgs.bazel-watcher}/bin/ibazel -bazel_path ${pkgs.bazel_8}/bin/bazel "$@" 48 | ''; 49 | in { 50 | packages.rust = rust; 51 | formatter.default = pkgs.alejandra; 52 | devShells.default = devenv.lib.mkShell { 53 | inherit inputs pkgs; 54 | modules = [ 55 | ({pkgs, ...}: { 56 | packages = with pkgs; 57 | [ 58 | alejandra 59 | bazel_8 60 | ibazel 61 | bazel-buildtools 62 | pkg-config 63 | rust 64 | grpcurl 65 | git 66 | leptosfmt 67 | jemalloc' 68 | diesel-cli' 69 | wabt 70 | postgresql 71 | flamegraph 72 | tokio-console 73 | (import ./nix/cc/cc.nix {inherit pkgs;}) 74 | ] 75 | ++ pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.darwin.cctools; 76 | enterShell = 77 | '' 78 | echo "BLADE Shell" 79 | echo "build --action_env=PATH=${bazelEnv}/bin" > .bazelenvrc 80 | echo "build --host_action_env=PATH=${bazelEnv}/bin" >> .bazelenvrc 81 | '' 82 | + pkgs.lib.optionalString (pkgs.stdenv.isDarwin) '' 83 | echo "build --action_env=DEVELOPER_DIR=${pkgs.apple-sdk}" >> .bazelenvrc 84 | echo "build --host_action_env=DEVELOPER_DIR=${pkgs.apple-sdk}" >> .bazelenvrc 85 | ''; 86 | }) 87 | ]; 88 | }; 89 | })); 90 | } 91 | 
-------------------------------------------------------------------------------- /nix/cc/cc.nix: -------------------------------------------------------------------------------- 1 | # We need to write a fancy nix file to handle the CC compiler due to OSX: https://github.com/tweag/rules_nixpkgs/issues/368 2 | # What this file is basically saying is: if OSX, change the CXX compiler to use a bunch of Apple frameworks, LLVM libs, 3 | # libc++ instead of libstdc++, and some additional compiler flags to ignore some warnings, otherwise, just use clang11 4 | {pkgs ? import <nixpkgs> {}}: let 5 | clang = pkgs.clang; 6 | path = builtins.storePath pkgs.path; 7 | darwinCC = 8 | # Work around https://github.com/NixOS/nixpkgs/issues/42059. 9 | # See also https://github.com/NixOS/nixpkgs/pull/41589. 10 | pkgs.wrapCCWith rec { 11 | cc = clang; 12 | bintools = pkgs.stdenv.cc.bintools; 13 | extraBuildCommands = '' 14 | echo "-Wno-unused-command-line-argument" >> $out/nix-support/cc-cflags 15 | echo "-Wno-elaborated-enum-base" >> $out/nix-support/cc-cflags 16 | echo "-isystem ${pkgs.llvmPackages.libcxx.dev}/include/c++/v1" >> $out/nix-support/cc-cflags 17 | echo "-isystem ${pkgs.llvmPackages.clang-unwrapped.lib}/lib/clang/${cc.version}/include" >> $out/nix-support/cc-cflags 18 | echo "-L${pkgs.llvmPackages.libcxx}/lib" >> $out/nix-support/cc-cflags 19 | echo "-L${pkgs.libiconv}/lib" >> $out/nix-support/cc-cflags 20 | echo "-resource-dir=${pkgs.stdenv.cc}/resource-root" >> $out/nix-support/cc-cflags 21 | ''; 22 | }; 23 | linuxCC = pkgs.wrapCCWith rec { 24 | cc = clang; 25 | bintools = pkgs.stdenv.cc.bintools.override { 26 | extraBuildCommands = '' 27 | wrap ${pkgs.stdenv.cc.bintools.targetPrefix}ld.lld ${path}/pkgs/build-support/bintools-wrapper/ld-wrapper.sh ${pkgs.lld}/bin/ld.lld 28 | wrap ${pkgs.stdenv.cc.bintools.targetPrefix}ld ${path}/pkgs/build-support/bintools-wrapper/ld-wrapper.sh ${pkgs.lld}/bin/ld.lld 29 | wrap ${pkgs.stdenv.cc.bintools.targetPrefix}lld 
${path}/pkgs/build-support/bintools-wrapper/ld-wrapper.sh ${pkgs.lld}/bin/ld.lld 30 | # Fake being gold because rules_nixpkgs forces this. 31 | wrap ${pkgs.stdenv.cc.bintools.targetPrefix}ld.gold ${path}/pkgs/build-support/bintools-wrapper/ld-wrapper.sh ${pkgs.lld}/bin/ld.lld 32 | ''; 33 | }; 34 | extraPackages = [pkgs.glibc.static]; 35 | extraBuildCommands = '' 36 | echo "-isystem ${pkgs.llvmPackages.clang-unwrapped.lib}/lib/clang/${cc.version}/include" >> $out/nix-support/cc-cflags 37 | echo "-L ${pkgs.glibc.static}/lib" >> $out/nix-support/cc-ldflags 38 | echo "-resource-dir=${cc}/resource-root" >> $out/nix-support/cc-cflags 39 | ''; 40 | }; 41 | in 42 | pkgs.buildEnv ( 43 | let 44 | cc = 45 | if pkgs.stdenv.isDarwin 46 | then darwinCC 47 | else linuxCC; 48 | in { 49 | name = "bazel-nixpkgs-cc"; 50 | # XXX: `gcov` is missing in `/bin`. 51 | # It exists in `stdenv.cc.cc` but that collides with `stdenv.cc`. 52 | paths = [cc cc.bintools] ++ pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.darwin.cctools; 53 | pathsToLink = ["/bin"]; 54 | passthru = { 55 | inherit (cc) isClang targetPrefix; 56 | orignalName = cc.name; 57 | }; 58 | } 59 | ) 60 | -------------------------------------------------------------------------------- /blade/routes/summary.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "ssr")] 2 | use std::sync::Arc; 3 | 4 | use components::{ 5 | card::Card, 6 | shellout::ShellOut, 7 | summaryheader::SummaryHeader, 8 | targetlist::TargetList, 9 | }; 10 | use leptos::prelude::*; 11 | use leptos_router::hooks::use_params; 12 | 13 | use crate::invocation::InvocationParams; 14 | 15 | #[cfg(feature = "ssr")] 16 | pub(crate) fn internal_err<T: std::fmt::Display>(e: T) -> ServerFnError { 17 | ServerFnError::ServerError(format!("Invocation {e} not found")) 18 | } 19 | 20 | #[server] 21 | pub async fn get_output(uuid: String) -> Result<String, ServerFnError> { 22 | let global: Arc<state::Global> = 
use_context::<Arc<state::Global>>().unwrap(); 23 | let id = uuid.clone(); 24 | db::run_group(global.db_manager.clone(), move |db| db.get_progress(&id)) 25 | .await 26 | .map_err(internal_err) 27 | } 28 | 29 | #[allow(non_snake_case)] 30 | #[component] 31 | pub fn Summary() -> impl IntoView { 32 | let params = use_params::<InvocationParams>(); 33 | let invocation = expect_context::<RwSignal<state::InvocationResults>>(); 34 | let (output, set_output) = signal("".to_string()); 35 | let output_res = LocalResource::new(move || { 36 | let id = params 37 | .with(|p| p.as_ref().map(|p| p.id.clone()).unwrap_or_default()) 38 | .unwrap_or_default(); 39 | async move { 40 | if id.is_empty() { 41 | return "".to_string(); 42 | } 43 | match get_output(id).await { 44 | Ok(v) => v, 45 | Err(e) => format!("{e:#?}"), 46 | } 47 | } 48 | }); 49 | Effect::new(move || { 50 | let out = output_res.read(); 51 | match out.as_ref() { 52 | Some(out) => set_output(out.clone()), 53 | None => { 54 | if output.with(|o| o.is_empty()) { 55 | set_output("Loading...".to_string()); 56 | } 57 | }, 58 | } 59 | let done = matches!( 60 | invocation.read_untracked().status, 61 | state::Status::Success 62 | | state::Status::Fail 63 | | state::Status::Skip 64 | | state::Status::Unknown 65 | ) || !invocation.read_untracked().is_live; 66 | if !done { 67 | set_timeout( 68 | move || output_res.refetch(), 69 | std::time::Duration::from_secs(10), 70 | ); 71 | } 72 | }); 73 | 74 | view! 
{ 75 | <div class="flex flex-col m-1 p-1 dark:bg-gray-800 overflow-hidden"> 76 | <Card class="flex p-3 m-2"> 77 | <SummaryHeader /> 78 | </Card> 79 | 80 | <div class="h-[73dvh] flex items-start justify-start justify-items-center overflow-hidden"> 81 | <Card class="h-full w-1/4 max-w-1/4 md:max-w-xs p-1 m-1 flex-1 overflow-x-auto overflow-auto"> 82 | {TargetList()} 83 | </Card> 84 | <Card class="h-full max-w-full w-full p-1 m-1 flex-1 overflow-x-auto overflow-auto"> 85 | <ShellOut text=output /> 86 | </Card> 87 | </div> 88 | </div> 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /blade/components/statusicon.rs: -------------------------------------------------------------------------------- 1 | use leptos::{either::EitherOf4, prelude::*}; 2 | use tailwindmerge::tailwind_merge; 3 | 4 | use crate::tooltip::Tooltip; 5 | 6 | #[allow(non_snake_case)] 7 | #[component] 8 | pub fn StatusIcon( 9 | status: Signal<state::Status>, 10 | #[prop(into, default = "".into())] class: Signal<String>, 11 | ) -> impl IntoView { 12 | move || { 13 | let c = move || class.get(); 14 | match status.get() { 15 | state::Status::Success => EitherOf4::A(view! { 16 | <Tooltip tooltip=move || view! { <span class="p-2">Success</span> }> 17 | <img class=c src="/assets/success.svg" /> 18 | </Tooltip> 19 | }), 20 | state::Status::Fail => EitherOf4::B(view! { 21 | <Tooltip tooltip=move || view! { <span class="p-2">Fail</span> }> 22 | <img class=c src="/assets/fail.svg" /> 23 | </Tooltip> 24 | }), 25 | state::Status::Skip => EitherOf4::C(view! { 26 | <Tooltip tooltip=move || view! { <span class="p-2">Skip</span> }> 27 | <img class=c src="/assets/skip.svg" /> 28 | </Tooltip> 29 | }), 30 | _ => EitherOf4::D(view! { 31 | <Tooltip tooltip=move || view! 
{ <span class="p-2">In Progress</span> }> 32 | <div role="status"> 33 | <svg 34 | aria-hidden="true" 35 | class=move || tailwind_merge( 36 | "text-gray-200 animate-spin fill-blue-600", 37 | &c(), 38 | ) 39 | 40 | viewBox="0 0 100 101" 41 | fill="none" 42 | xmlns="http://www.w3.org/2000/svg" 43 | > 44 | <path 45 | d="M100 50.5908C100 78.2051 77.6142 100.591 50 100.591C22.3858 100.591 0 78.2051 0 50.5908C0 22.9766 22.3858 0.59082 50 0.59082C77.6142 0.59082 100 22.9766 100 50.5908ZM9.08144 50.5908C9.08144 73.1895 27.4013 91.5094 50 91.5094C72.5987 91.5094 90.9186 73.1895 90.9186 50.5908C90.9186 27.9921 72.5987 9.67226 50 9.67226C27.4013 9.67226 9.08144 27.9921 9.08144 50.5908Z" 46 | fill="currentColor" 47 | ></path> 48 | <path 49 | d="M93.9676 39.0409C96.393 38.4038 97.8624 35.9116 97.0079 33.5539C95.2932 28.8227 92.871 24.3692 89.8167 20.348C85.8452 15.1192 80.8826 10.7238 75.2124 7.41289C69.5422 4.10194 63.2754 1.94025 56.7698 1.05124C51.7666 0.367541 46.6976 0.446843 41.7345 1.27873C39.2613 1.69328 37.813 4.19778 38.4501 6.62326C39.0873 9.04874 41.5694 10.4717 44.0505 10.1071C47.8511 9.54855 51.7191 9.52689 55.5402 10.0491C60.8642 10.7766 65.9928 12.5457 70.6331 15.2552C75.2735 17.9648 79.3347 21.5619 82.5849 25.841C84.9175 28.9121 86.7997 32.2913 88.1811 35.8758C89.083 38.2158 91.5421 39.6781 93.9676 39.0409Z" 50 | fill="currentFill" 51 | ></path> 52 | </svg> 53 | <span class="sr-only">Loading...</span> 54 | </div> 55 | </Tooltip> 56 | }), 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /blade/db/exec.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, atomic::AtomicU32}; 2 | 3 | use anyhow::Result; 4 | use lazy_static::lazy_static; 5 | use prometheus_client::metrics::{counter::Counter, gauge::Gauge, histogram::Histogram}; 6 | use tracing::instrument; 7 | 8 | lazy_static! 
{ 9 | static ref DB_EXEC_TOTAL: Counter::<u64> = metrics::register_metric( 10 | "blade_db_exec_total", 11 | "Total number of DB executions via spawn_blocking", 12 | Counter::default() 13 | ); 14 | static ref DB_EXEC_GROUPED_TOTAL: Counter::<u64> = metrics::register_metric( 15 | "blade_db_exec_grouped_total", 16 | "Total number of grouped DB executions", 17 | Counter::default() 18 | ); 19 | static ref DB_BLOCKING_INFLIGHT: Gauge::<u32, AtomicU32> = metrics::register_metric( 20 | "blade_db_blocking_inflight", 21 | "Number of DB operations currently running in spawn_blocking", 22 | Gauge::default() 23 | ); 24 | static ref DB_EXEC_DURATION: Histogram = { 25 | let buckets = [ 26 | 0.001, 0.005, 0.010, 0.025, 0.050, 0.100, 0.250, 0.500, 1.0, 2.5, 5.0, 10.0, 27 | ]; 28 | metrics::register_metric( 29 | "blade_db_exec_duration_seconds", 30 | "Duration of DB operations in spawn_blocking", 31 | Histogram::new(buckets.into_iter()), 32 | ) 33 | }; 34 | } 35 | 36 | /// Execute a single database operation in a blocking task. 37 | /// This is the basic helper for migrating sync DB calls to async contexts. 38 | #[instrument(skip(mgr, f), name = "db_exec")] 39 | pub async fn run<T, F>(mgr: Arc<dyn state::DBManager>, f: F) -> Result<T> 40 | where 41 | T: Send + 'static, 42 | F: FnOnce(&mut dyn state::DB) -> Result<T> + Send + 'static, 43 | { 44 | DB_EXEC_TOTAL.inc(); 45 | DB_BLOCKING_INFLIGHT.inc(); 46 | let start = std::time::Instant::now(); 47 | 48 | let result = tokio::task::spawn_blocking(move || { 49 | let mut db = mgr.get()?; 50 | f(db.as_mut()) 51 | }) 52 | .await 53 | .map_err(|e| anyhow::anyhow!("DB task join error: {e}"))?; 54 | 55 | DB_BLOCKING_INFLIGHT.dec(); 56 | DB_EXEC_DURATION.observe(start.elapsed().as_secs_f64()); 57 | result 58 | } 59 | 60 | /// Execute multiple database operations in a single blocking task. 61 | /// This amortizes the spawn_blocking overhead for related operations. 
62 | #[instrument(skip(mgr, f), name = "db_exec_group")] 63 | pub async fn run_group<T, F>(mgr: Arc<dyn state::DBManager>, f: F) -> Result<T> 64 | where 65 | T: Send + 'static, 66 | F: FnOnce(&mut dyn state::DB) -> Result<T> + Send + 'static, 67 | { 68 | DB_EXEC_GROUPED_TOTAL.inc(); 69 | DB_BLOCKING_INFLIGHT.inc(); 70 | let start = std::time::Instant::now(); 71 | 72 | let result = tokio::task::spawn_blocking(move || { 73 | let mut db = mgr.get()?; 74 | f(db.as_mut()) 75 | }) 76 | .await 77 | .map_err(|e| anyhow::anyhow!("DB task join error: {e}"))?; 78 | 79 | DB_BLOCKING_INFLIGHT.dec(); 80 | DB_EXEC_DURATION.observe(start.elapsed().as_secs_f64()); 81 | result 82 | } 83 | 84 | /// Execute database operations within a transaction. 85 | /// The closure must be synchronous and perform all DB operations sequentially. 86 | #[instrument(skip(mgr, f), name = "db_transaction")] 87 | pub async fn transaction<T, F>(mgr: Arc<dyn state::DBManager>, f: F) -> Result<T> 88 | where 89 | T: Send + 'static, 90 | F: FnOnce(&mut dyn state::DB) -> Result<T> + Send + 'static, 91 | { 92 | // For now, this is identical to run_group but provides semantic clarity 93 | // Later we could add actual transaction handling if needed 94 | run_group(mgr, f).await 95 | } 96 | -------------------------------------------------------------------------------- /blade/components/shellout.rs: -------------------------------------------------------------------------------- 1 | use std::hash::{DefaultHasher, Hash, Hasher}; 2 | 3 | use leptos::{html::*, prelude::*}; 4 | use web_sys::{Blob, Url, js_sys::Array, window}; 5 | 6 | const TRUNCATE_THRESHOLD: usize = 500; 7 | const MAX_DISPLAY_LINES: usize = TRUNCATE_THRESHOLD * 2; 8 | 9 | #[allow(non_snake_case)] 10 | #[component] 11 | pub fn ShellOut(#[prop(into)] text: Signal<String>) -> impl IntoView { 12 | let (truncated, set_truncated) = signal(false); 13 | let (force_show, set_force_show) = signal(false); 14 | let lines = Memo::new(move |_| { 15 | let text = 
text.read(); 16 | let lines: Vec<&str> = text.lines().collect(); 17 | let mut should_truncate = lines.len() > MAX_DISPLAY_LINES; 18 | if force_show.get() { 19 | should_truncate = false; 20 | } 21 | set_truncated.set(should_truncate); 22 | lines 23 | .clone() 24 | .into_iter() 25 | .enumerate() 26 | .filter(|(i, _)| { 27 | if should_truncate { 28 | *i < TRUNCATE_THRESHOLD || *i > lines.len() - TRUNCATE_THRESHOLD 29 | } else { 30 | true 31 | } 32 | }) 33 | .map(|(i, s)| (i, s.to_string())) 34 | .collect::<Vec<(usize, String)>>() 35 | }); 36 | 37 | let open_raw_output = move |_| { 38 | let array = Array::new(); 39 | let js_text: wasm_bindgen::JsValue = text.get().into(); 40 | array.push(&js_text); 41 | // Create the blob 42 | let blob = Blob::new_with_str_sequence(&array).unwrap(); 43 | 44 | // Create the blob URL 45 | let blob_url = Url::create_object_url_with_blob(&blob).unwrap(); 46 | 47 | if let Some(window) = window() { 48 | let _ = window.open_with_url_and_target(&blob_url, "_blank"); 49 | } 50 | }; 51 | 52 | let show_all = move |_| { 53 | set_force_show.set(true); 54 | }; 55 | 56 | view! { 57 | <div class="bg-gray-800 text-white p-4 rounded-lg overflow-auto h-full"> 58 | <Show when=move || { truncated.get() } fallback=|| view! { <></> }> 59 | <div class="bg-yellow-600 text-white p-2 mb-2 rounded"> 60 | "Output truncated. Showing first " {TRUNCATE_THRESHOLD} " and last " 61 | {TRUNCATE_THRESHOLD} " lines." 
62 | <button 63 | on:click=open_raw_output 64 | class="ml-4 px-3 py-1 bg-blue-700 hover:bg-blue-800 rounded text-sm" 65 | > 66 | "View Full Raw Output" 67 | </button> 68 | <button 69 | on:click=show_all 70 | class="ml-4 px-3 py-1 bg-blue-700 hover:bg-blue-800 rounded text-sm" 71 | > 72 | "Load Full Output (slow)" 73 | </button> 74 | </div> 75 | </Show> 76 | <For 77 | each=move || lines.get() 78 | key=|line| { 79 | let mut h = DefaultHasher::new(); 80 | line.0.hash(&mut h); 81 | line.1.hash(&mut h); 82 | h.finish() 83 | } 84 | children=move |line| { 85 | let converted = ansi_to_html::convert_escaped(&line.1) 86 | .unwrap_or_else(|_| line.1.clone()); 87 | view! { <div class="whitespace-pre font-mono" inner_html=converted></div> } 88 | } 89 | /> 90 | </div> 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /third_party/nix/jemalloc/default.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | stdenv, 4 | fetchurl, 5 | fetchpatch, 6 | disableInitExecTls ? false, 7 | enableProf ? true, 8 | # By default, jemalloc puts a je_ prefix onto all its symbols on OSX, which 9 | # then stops downstream builds (mariadb in particular) from detecting it. This 10 | # option should remove the prefix and give us a working jemalloc. 11 | # Causes segfaults with some software (ex. rustc), but defaults to true for backward 12 | # compatibility. 13 | stripPrefix ? 
{
  lib,
  stdenv,
  fetchurl,
  fetchpatch,
  disableInitExecTls ? false,
  enableProf ? true,
  # By default, jemalloc puts a je_ prefix onto all its symbols on OSX, which
  # then stops downstream builds (mariadb in particular) from detecting it. This
  # option should remove the prefix and give us a working jemalloc.
  # Causes segfaults with some software (ex. rustc), but defaults to true for backward
  # compatibility.
  # NOTE(review): `stripPrefix` is currently unreferenced below — the Darwin
  # prefix is always applied via configureFlags regardless. Confirm whether the
  # option should gate that branch or be removed.
  stripPrefix ? stdenv.hostPlatform.isDarwin,
}:
stdenv.mkDerivation rec {
  pname = "jemalloc";
  version = "5.3.0";

  src = fetchurl {
    url = "https://github.com/jemalloc/jemalloc/releases/download/${version}/${pname}-${version}.tar.bz2";
    sha256 = "sha256-LbgtHnEZ3z5xt2QCGbbf6EeJvAU3mDw7esT3GJrs/qo=";
  };

  patches = [
    # fix tests under --with-jemalloc-prefix=, see https://github.com/jemalloc/jemalloc/pull/2340
    (fetchpatch {
      url = "https://github.com/jemalloc/jemalloc/commit/d00ecee6a8dfa90afcb1bbc0858985c17bef6559.patch";
      hash = "sha256-N5i4IxGJ4SSAgFiq5oGRnrNeegdk2flw9Sh2mP0yl4c=";
    })
    # fix linking with libc++, can be removed in the next update (after 5.3.0).
    # https://github.com/jemalloc/jemalloc/pull/2348
    (fetchpatch {
      url = "https://github.com/jemalloc/jemalloc/commit/4422f88d17404944a312825a1aec96cd9dc6c165.patch";
      hash = "sha256-dunkE7XHzltn5bOb/rSHqzpRniAFuGubBStJeCxh0xo=";
    })
  ];

  configureFlags =
    (
      if stdenv.hostPlatform.isDarwin
      then ["--with-private-namespace=_rjem_" "--with-jemalloc-prefix=_rjem_"]
      else ["--with-private-namespace=" "--with-jemalloc-prefix="]
    )
    ++ ["--enable-static" "--disable-cxx"]
    ++ lib.optional disableInitExecTls "--disable-initial-exec-tls"
    # jemalloc is unable to correctly detect transparent hugepage support on
    # ARM (https://github.com/jemalloc/jemalloc/issues/526), and the default
    # kernel ARMv6/7 kernel does not enable it, so we explicitly disable support
    ++ lib.optionals (stdenv.hostPlatform.isAarch32 && lib.versionOlder version "5") [
      "--disable-thp"
      "je_cv_thp=no"
    ]
    ++ lib.optional enableProf "--enable-prof"
    # AArch64 has configurable page size up to 64k. The default configuration
    # for jemalloc only supports 4k page sizes.
    ++ lib.optional stdenv.hostPlatform.isAarch64 "--with-lg-page=16"
    # See https://github.com/jemalloc/jemalloc/issues/1997
    # Using a value of 48 should work on both emulated and native x86_64-darwin.
    ++ lib.optional (stdenv.hostPlatform.isDarwin && stdenv.hostPlatform.isx86_64) "--with-lg-vaddr=48";

  # BUG FIX: the flag groups were previously joined with bare string
  # concatenation and no separator — if both conditions held at once the flags
  # would fuse into "...array-bounds-static-libgcc". Build a list and let
  # toString join the entries with spaces.
  env.NIX_CFLAGS_COMPILE = toString (
    lib.optional stdenv.hostPlatform.isDarwin "-Wno-error=array-bounds"
    ++ lib.optional (!stdenv.cc.isClang) "-static-libgcc -static-libstdc++"
  );

  LDFLAGS =
    if stdenv.hostPlatform.isDarwin
    then "-Wl,-all_load -lpthread -Wl,-noall_load"
    else "-Wl,--whole-archive -lpthread -Wl,--no-whole-archive";

  # Tries to link test binaries dynamically and fails
  doCheck = false;

  enableParallelBuilding = true;

  meta = with lib; {
    homepage = "https://jemalloc.net/";
    description = "General purpose malloc(3) implementation";
    longDescription = ''
      malloc(3)-compatible memory allocator that emphasizes fragmentation
      avoidance and scalable concurrency support.
    '';
    license = licenses.bsd2;
    platforms = platforms.all;
  };
}
{ 12 | pub static ref DESCRIPTORS: Box<FileDescriptorSet> = Box::new(load()); 13 | } 14 | 15 | fn load() -> FileDescriptorSet { 16 | let mut hs = HashMap::new(); 17 | let r = Runfiles::create().expect("Must run using bazel with runfiles"); 18 | let root = r.rlocation("").unwrap(); 19 | for entry in WalkDir::new(root).follow_links(true) { 20 | let p = entry.expect("invalid entry when walking runfiles"); 21 | if p.path().to_string_lossy().ends_with("proto.bin") { 22 | hs.insert( 23 | p.path().file_name().unwrap().to_string_lossy().to_string(), 24 | p.path().to_path_buf(), 25 | ); 26 | } 27 | } 28 | let mut fds: FileDescriptorSet = FileDescriptorSet::default(); 29 | for v in hs.values() { 30 | let desc = fs::read(v).expect("failed to read descriptor"); 31 | fds.merge(&desc[..]).expect("failed to merge descriptor"); 32 | } 33 | fds 34 | } 35 | 36 | pub fn init_global_descriptor_pool() -> Result<()> { 37 | let b = &*DESCRIPTORS.encode_to_vec(); 38 | DescriptorPool::decode_global_file_descriptor_set(b) 39 | .context("failed to load global descriptor pool")?; 40 | 41 | Ok(()) 42 | } 43 | 44 | #[cfg(test)] 45 | mod tests { 46 | use build_event_stream_proto::*; 47 | use prost_reflect::ReflectMessage; 48 | 49 | use super::*; 50 | 51 | #[test] 52 | fn test_load() { 53 | assert_ne!(DESCRIPTORS.file.len(), 0); 54 | } 55 | 56 | #[test] 57 | #[should_panic] 58 | fn test_reflect_global_pool_default() { 59 | let global = DescriptorPool::global(); 60 | let mut num_wkt = 0; 61 | let mut num_bep = 0; 62 | global.all_messages().for_each(|m| { 63 | if m.full_name().starts_with("google.protobuf.") { 64 | num_wkt += 1; 65 | } else { 66 | num_bep += 1; 67 | } 68 | }); 69 | assert!(num_wkt > 0); 70 | assert_eq!(num_bep, 0); 71 | let be = build_event_stream::EnvironmentVariable { 72 | name: "PATH".into(), 73 | value: "/usr/bin".into(), 74 | }; 75 | be.transcode_to_dynamic(); 76 | } 77 | 78 | #[test] 79 | fn test_init_reflect_global_pool() { 80 | init_global_descriptor_pool().expect("failed 
to load descriptors into global pool"); 81 | let global = DescriptorPool::global(); 82 | let mut num_wkt = 0; 83 | let mut num_bep = 0; 84 | global.all_messages().for_each(|m| { 85 | if m.full_name().starts_with("google.protobuf.") { 86 | num_wkt += 1; 87 | } else { 88 | num_bep += 1; 89 | } 90 | }); 91 | assert!(num_wkt > 0); 92 | assert!(num_bep > 0); 93 | 94 | let be = build_event_stream::EnvironmentVariable { 95 | name: "PATH".into(), 96 | value: "/usr/bin".into(), 97 | }; 98 | let d = be.transcode_to_dynamic(); 99 | let j = serde_json::ser::to_string(&d).unwrap(); 100 | assert_eq!(j, r#"{"name":"UEFUSA==","value":"L3Vzci9iaW4="}"#); 101 | let mut udo: std::path::PathBuf = 102 | std::env::var("TEST_UNDECLARED_OUTPUTS_DIR").unwrap().into(); 103 | udo.push("random.txt"); 104 | std::fs::write(udo, "random file").unwrap(); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /postgres/harness/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::OpenOptions, io::Write, net::TcpListener}; 2 | 3 | use anyhow::anyhow; 4 | 5 | pub struct PgHarness { 6 | data_path: std::path::PathBuf, 7 | postgres: std::process::Child, 8 | db_name: String, 9 | port: u16, 10 | } 11 | 12 | impl PgHarness { 13 | pub fn uri(&self) -> String { format!("postgres://localhost:{}/{}", self.port, self.db_name) } 14 | 15 | pub fn data_path(&self) -> std::path::PathBuf { self.data_path.clone() } 16 | 17 | pub fn close(&mut self) -> anyhow::Result<()> { 18 | self.postgres.wait()?; 19 | Ok(()) 20 | } 21 | } 22 | 23 | impl Drop for PgHarness { 24 | fn drop(&mut self) { 25 | rustix::process::kill_process( 26 | rustix::process::Pid::from_child(&self.postgres), 27 | rustix::process::Signal::Term, 28 | ) 29 | .unwrap(); 30 | self.close().unwrap(); 31 | } 32 | } 33 | 34 | #[allow(unused_assignments)] 35 | pub fn new(p: &str) -> anyhow::Result<PgHarness> { 36 | let r = runfiles::Runfiles::create()?; 37 | 
let bin_path = r.rlocation("postgresql-bin/bin").unwrap(); 38 | let initdb_path = bin_path.join("initdb").read_link().unwrap(); 39 | let initdb = std::process::Command::new(initdb_path).arg(p).status()?; 40 | if !initdb.success() { 41 | return Err(anyhow!("failed to init db: {:#?}", initdb)); 42 | } 43 | let config_path = std::path::PathBuf::from(p).join("postgresql.conf"); 44 | let mut port = 0; 45 | 46 | { 47 | let lis = TcpListener::bind("127.0.0.1:0")?; 48 | let addr = lis.local_addr()?; 49 | 50 | let mut f = OpenOptions::new().append(true).open(config_path)?; 51 | port = addr.port(); 52 | writeln!(f, "\nport = {}", addr.port())?; 53 | } 54 | let postgres_path = bin_path.join("postgres").read_link().unwrap(); 55 | let harness = PgHarness { 56 | data_path: p.into(), 57 | postgres: std::process::Command::new(postgres_path) 58 | .arg("-D") 59 | .arg(p) 60 | .arg("-k") 61 | .arg(p) 62 | .spawn()?, 63 | db_name: "blade".into(), 64 | port, 65 | }; 66 | let mut attempts = 3; 67 | let createdb_path = bin_path.join("createdb"); 68 | while attempts > 0 { 69 | if std::process::Command::new(createdb_path.clone()) 70 | .arg("-h") 71 | .arg("127.0.0.1") 72 | .arg("-p") 73 | .arg(harness.port.to_string()) 74 | .arg("blade") 75 | .status()? 
76 | .success() 77 | { 78 | break; 79 | } 80 | attempts -= 1; 81 | std::thread::sleep(std::time::Duration::from_secs(1)); 82 | } 83 | Ok(harness) 84 | } 85 | 86 | #[cfg(test)] 87 | mod test { 88 | 89 | #[test] 90 | fn test_new() { 91 | let r = runfiles::Runfiles::create().unwrap(); 92 | let bin_path = r.rlocation("postgresql-bin/bin").unwrap(); 93 | let psql_path = bin_path.join("psql").read_link().unwrap(); 94 | let tmp = tempdir::TempDir::new("test_new").unwrap(); 95 | let path = tmp.path().to_str().unwrap(); 96 | let h = super::new(path).unwrap(); 97 | let uri = h.uri(); 98 | let u = uri.split('/').collect::<Vec<_>>(); 99 | let db_name = u.last().unwrap(); 100 | let port = u[2].split(':').next_back().unwrap(); 101 | let o = std::process::Command::new(psql_path) 102 | .arg("-l") 103 | .arg("-p") 104 | .arg(port) 105 | .arg("-d") 106 | .arg(db_name) 107 | .output() 108 | .unwrap(); 109 | println!("{o:#?}"); 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /third_party/nix/deps.bzl: -------------------------------------------------------------------------------- 1 | """Nix deps.""" 2 | 3 | load("@io_tweag_rules_nixpkgs//nixpkgs:nixpkgs.bzl", "nixpkgs_package") 4 | 5 | def third_party_nix_deps(): 6 | nixpkgs_package( 7 | name = "bintools", 8 | repository = "@nixpkgs", 9 | ) 10 | nixpkgs_package( 11 | name = "wasm-bindgen-cli", 12 | attribute_path = "wasm-bindgen-cli_0_2_100", 13 | build_file = "//third_party/nix:BUILD.wasm-bindgen-cli", 14 | repository = "@nixpkgs", 15 | ) 16 | nixpkgs_package( 17 | name = "zlib", 18 | repository = "@nixpkgs", 19 | ) 20 | nixpkgs_package( 21 | name = "tailwindcss", 22 | attribute_path = "tailwindcss", 23 | repository = "@nixpkgs", 24 | build_file = "//third_party/nix:BUILD.tailwindcss", 25 | ) 26 | nixpkgs_package( 27 | name = "diesel", 28 | repositories = { 29 | "nixpkgs": "@nixpkgs", 30 | "fenix": "@fenix", 31 | }, 32 | nix_file = "//third_party/nix/diesel_cli:bazel.nix", 33 | 
nix_file_deps = [ 34 | "//third_party/nix/diesel_cli:default.nix", 35 | "//nix/rust:rust_platform.nix", 36 | ], 37 | ) 38 | nixpkgs_package( 39 | name = "sqlite", 40 | attribute_path = "sqlite.out", 41 | repository = "@nixpkgs", 42 | build_file = "//third_party/nix:BUILD.sqlite", 43 | ) 44 | nixpkgs_package( 45 | name = "postgresql", 46 | attribute_path = "postgresql.lib", 47 | repository = "@nixpkgs", 48 | build_file = "//third_party/nix:BUILD.postgresql", 49 | ) 50 | nixpkgs_package( 51 | name = "postgresql-bin", 52 | attribute_path = "postgresql", 53 | repository = "@nixpkgs", 54 | ) 55 | nixpkgs_package( 56 | name = "oci_base", 57 | build_file_content = """exports_files(["closure.tar.gz"])""", 58 | repository = "@nixpkgs", 59 | nix_file = "//third_party/nix/oci_base:default.nix", 60 | nixopts = ["--show-trace"], 61 | ) 62 | nixpkgs_package( 63 | name = "cargo-bazel", 64 | repositories = { 65 | "nixpkgs": "@nixpkgs", 66 | "fenix": "@fenix", 67 | }, 68 | nix_file = "//third_party/nix/cargo-bazel:bazel.nix", 69 | nix_file_deps = [ 70 | "//third_party/nix/cargo-bazel:default.nix", 71 | "//nix/rust:rust_platform.nix", 72 | ], 73 | ) 74 | nixpkgs_package( 75 | name = "jemalloc", 76 | repository = "@nixpkgs", 77 | nix_file = "//third_party/nix/jemalloc:bazel.nix", 78 | nix_file_deps = [ 79 | "//third_party/nix/jemalloc:default.nix", 80 | ], 81 | build_file = "//third_party/nix/jemalloc:BUILD.jemalloc", 82 | ) 83 | nixpkgs_package( 84 | name = "protobuf", 85 | attribute_path = "protobuf", 86 | repository = "@nixpkgs", 87 | build_file = "//third_party/nix:BUILD.protobuf", 88 | ) 89 | nixpkgs_package( 90 | name = "libunwind", 91 | nix_file_content = """{ pkgs ? 
import <nixpkgs> {} }: 92 | pkgs.buildEnv { 93 | name = "libunwind-combined"; 94 | paths = [ 95 | pkgs.libunwind.out 96 | pkgs.libunwind.dev 97 | ]; 98 | 99 | # Optional: avoid conflicts if files overlap 100 | ignoreCollisions = false; 101 | 102 | # Optional: create symlinks instead of copying 103 | pathsToLink = [ "/" ]; 104 | } 105 | """, 106 | repository = "@nixpkgs", 107 | build_file = "//third_party/nix:BUILD.libunwind", 108 | ) 109 | nixpkgs_package( 110 | name = "pkg-config", 111 | repository = "@nixpkgs", 112 | ) 113 | -------------------------------------------------------------------------------- /blade/routes/profile.rs: -------------------------------------------------------------------------------- 1 | use std::io::Read; 2 | 3 | use components::{card::Card, charts::ganttchart::BazelTraceChart, summaryheader::SummaryHeader}; 4 | use leptos::{either::Either, prelude::*}; 5 | use trace_event_parser::{BazelTrace, TraceEventFile}; 6 | 7 | #[component] 8 | pub fn BazelProfile() -> impl IntoView { 9 | let invocation = expect_context::<RwSignal<state::InvocationResults>>(); 10 | 11 | // Resource to fetch and parse the profile data 12 | let profile_data = LocalResource::new(move || async move { 13 | let profile_uri = invocation.with(|inv| inv.profile_uri.clone()); 14 | 15 | match profile_uri { 16 | Some(uri) => { 17 | // Fetch the profile artifact 18 | let bytes = shared::get_artifact(uri) 19 | .await 20 | .map_err(|e| format!("Failed to fetch profile: {e}"))?; 21 | 22 | // Decompress gzip if needed 23 | let decompressed_bytes = if bytes.starts_with(&[0x1F, 0x8B]) { 24 | // It's gzipped 25 | let mut decoder = flate2::read::GzDecoder::new(&bytes[..]); 26 | let mut decompressed = Vec::new(); 27 | decoder 28 | .read_to_end(&mut decompressed) 29 | .map_err(|e| format!("Failed to decompress profile: {e}"))?; 30 | decompressed 31 | } else { 32 | bytes 33 | }; 34 | 35 | // Parse JSON 36 | let json_str = String::from_utf8(decompressed_bytes) 37 | .map_err(|e| 
format!("Failed to convert profile to UTF-8: {e}"))?; 38 | 39 | let trace_file = TraceEventFile::from_json(&json_str) 40 | .map_err(|e| format!("Failed to parse profile JSON: {e}"))?; 41 | 42 | let bazel_trace = BazelTrace::from_trace_events(trace_file.trace_events); 43 | 44 | Ok(bazel_trace) 45 | }, 46 | None => Err("No profile data available for this build".to_string()), 47 | } 48 | }); 49 | 50 | view! { 51 | <div class="flex flex-col m-1 p-1 dark:bg-gray-800"> 52 | <Card class="p-3 m-2"> 53 | <SummaryHeader /> 54 | </Card> 55 | 56 | <Suspense fallback=move || { 57 | view! { <div class="text-center py-8">"Loading profile data..."</div> } 58 | }> 59 | {move || Suspend::new(async move { 60 | match profile_data.await { 61 | Ok(bazel_trace) => { 62 | Either::Left( 63 | view! { 64 | <div class="h-[73dvh] overflow-auto"> 65 | <h2 class="text-lg font-semibold mb-4"> 66 | "Profile Timeline" 67 | </h2> 68 | <BazelTraceChart bazel_trace=bazel_trace /> 69 | </div> 70 | }, 71 | ) 72 | } 73 | Err(error) => { 74 | Either::Right( 75 | view! 
{ 76 | <div class="text-center py-8 text-red-500"> 77 | <p class="font-semibold">"Error loading profile:"</p> 78 | <p class="text-sm mt-2">{error}</p> 79 | </div> 80 | }, 81 | ) 82 | } 83 | } 84 | })} 85 | </Suspense> 86 | </div> 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /WORKSPACE.bazel: -------------------------------------------------------------------------------- 1 | workspace( 2 | name = "blade", 3 | ) 4 | 5 | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") 6 | 7 | http_archive( 8 | name = "io_tweag_rules_nixpkgs", 9 | sha256 = "30271f7bd380e4e20e4d7132c324946c4fdbc31ebe0bbb6638a0f61a37e74397", 10 | strip_prefix = "rules_nixpkgs-0.13.0", 11 | urls = ["https://github.com/tweag/rules_nixpkgs/releases/download/v0.13.0/rules_nixpkgs-0.13.0.tar.gz"], 12 | ) 13 | 14 | load("@io_tweag_rules_nixpkgs//nixpkgs:repositories.bzl", "rules_nixpkgs_dependencies") 15 | 16 | rules_nixpkgs_dependencies() 17 | 18 | load("@io_tweag_rules_nixpkgs//nixpkgs:nixpkgs.bzl", "nixpkgs_cc_configure", "nixpkgs_local_repository") 19 | 20 | # It is recommended to keep nixpkgs of nix-shell (which provide Bazel), 21 | # and nixpkgs of Bazel Workspace in sync - otherwise one may 22 | # got hit with nasty glibc mismatch errors. 
23 | nixpkgs_local_repository( 24 | name = "nixpkgs", 25 | nix_file = "//nix/bazel:nixpkgs.nix", 26 | nix_file_deps = [ 27 | "//:flake.lock", 28 | "//nix/bazel:nixpkgs.nix", 29 | ], 30 | ) 31 | 32 | nixpkgs_local_repository( 33 | name = "fenix", 34 | nix_file = "//nix/rust:fenix.nix", 35 | nix_file_deps = ["//:flake.lock"], 36 | ) 37 | 38 | # rules_rust also uses the cc compiler 39 | nixpkgs_cc_configure( 40 | name = "nixpkgs_config_cc", 41 | nix_file_content = "import ./nix/cc/cc.nix {}", 42 | nix_file_deps = ["//nix/cc:cc.nix"], 43 | repository = "@nixpkgs", 44 | ) 45 | 46 | load("@io_tweag_rules_nixpkgs//nixpkgs:toolchains/rust.bzl", "nixpkgs_rust_configure") 47 | 48 | # Note that default_edition doesn't work. 49 | nixpkgs_rust_configure( 50 | name = "nix_rust", 51 | nix_file = "//nix/rust:rust.nix", 52 | repositories = { 53 | "nixpkgs": "@nixpkgs", 54 | "fenix": "@fenix", 55 | }, 56 | ) 57 | 58 | nixpkgs_rust_configure( 59 | name = "nix_rust_wasm", 60 | nix_file = "//nix/rust:rust.nix", 61 | nixopts = [ 62 | "--argstr", 63 | "target", 64 | "wasm32-unknown-unknown", 65 | ], 66 | repositories = { 67 | "nixpkgs": "@nixpkgs", 68 | "fenix": "@fenix", 69 | }, 70 | target_constraints = [ 71 | "@platforms//cpu:wasm32", 72 | "@platforms//os:none", 73 | ], 74 | ) 75 | 76 | # crate_universe as a way of governing deps 77 | load("@rules_rust//crate_universe:repositories.bzl", "crate_universe_dependencies") 78 | 79 | crate_universe_dependencies( 80 | rust_toolchain_cargo_template = "@nix_rust//:bin/{tool}", 81 | rust_toolchain_rustc_template = "@nix_rust//:bin/{tool}", 82 | ) 83 | 84 | load("//third_party/nix:deps.bzl", "third_party_nix_deps") 85 | 86 | third_party_nix_deps() 87 | 88 | load("//third_party/rust:deps.bzl", "rust_dependencies") 89 | 90 | rust_dependencies() 91 | 92 | load("@crate//:defs.bzl", "crate_repositories") 93 | 94 | crate_repositories() 95 | 96 | load("@wasm_crate//:defs.bzl", wasm_crate_repositories = "crate_repositories") 97 | 98 | 
wasm_crate_repositories() 99 | 100 | register_toolchains("//nix/rust:rust_analyzer_toolchain") 101 | 102 | register_toolchains("//nix/rust:rustfmt_toolchain") 103 | 104 | register_toolchains("//nix/rust:wasm_bindgen_toolchain") 105 | 106 | register_toolchains("//prost:prost_toolchain") 107 | 108 | http_archive( 109 | name = "googleapis", 110 | repo_mapping = { 111 | "@com_google_protobuf": "@protobuf", 112 | }, 113 | sha256 = "9094b43a8a8b6f05dd4868cb509fa934012725107995865b5c8eb9c67fbea35d", 114 | strip_prefix = "googleapis-db5ce67d735d2ceb6fe925f3e317a3f30835cfd6", 115 | urls = ["https://github.com/googleapis/googleapis/archive/db5ce67d735d2ceb6fe925f3e317a3f30835cfd6.tar.gz"], 116 | ) 117 | 118 | load("@googleapis//:repository_rules.bzl", "switched_rules_by_language") 119 | 120 | switched_rules_by_language("com_google_googleapis_imports") 121 | -------------------------------------------------------------------------------- /blade/memdump/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ffi::{CString, c_char}, 3 | path::PathBuf, 4 | }; 5 | 6 | use anyhow::{Context, anyhow}; 7 | use tokio::io::AsyncReadExt; 8 | 9 | #[cfg(test)] 10 | #[global_allocator] 11 | static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; 12 | 13 | const PROF_DUMP: &[u8] = b"prof.dump\0"; 14 | const PROF_ACTIVE: &[u8] = b"prof.active\0"; 15 | 16 | pub fn is_profiling_active() -> bool { 17 | unsafe { 18 | let Ok(e) = tikv_jemalloc_ctl::raw::read(PROF_ACTIVE) else { 19 | return false; 20 | }; 21 | e 22 | } 23 | } 24 | 25 | pub async fn dump_profile() -> anyhow::Result<Vec<u8>> { 26 | if !tikv_jemalloc_ctl::profiling::prof::read()? 
|| !is_profiling_active() { 27 | return Err(anyhow!("profiling not enabled!")); 28 | } 29 | let tmp_path = tempdir::TempDir::new("blade-memdump").context("failed to create tempdir")?; 30 | 31 | let mut path_buf = PathBuf::from(tmp_path.path()); 32 | path_buf.push("blade.hprof"); 33 | 34 | let path = path_buf 35 | .to_str() 36 | .ok_or_else(|| anyhow!("failed to convert path to str"))? 37 | .to_string(); 38 | 39 | let mut bytes = CString::new(path.as_str()) 40 | .context(format!("failed to convert '{path:#?}' to bytes"))? 41 | .into_bytes_with_nul(); 42 | 43 | { 44 | // #safety: we always expect a valid temp file path to write profiling data to. 45 | let ptr = bytes.as_mut_ptr() as *mut c_char; 46 | unsafe { 47 | tikv_jemalloc_ctl::raw::write(PROF_DUMP, ptr) 48 | .map_err(|e| anyhow!("failed to take profile: {e:#?}"))? 49 | } 50 | } 51 | 52 | let mut f = tokio::fs::File::open(path.as_str()) 53 | .await 54 | .context("failed to open profile")?; 55 | let mut buf = vec![]; 56 | let _ = f 57 | .read_to_end(&mut buf) 58 | .await 59 | .context("failed to read profile")?; 60 | Ok(buf) 61 | } 62 | 63 | pub async fn stats() -> anyhow::Result<Vec<u8>> { 64 | let mut opts = tikv_jemalloc_ctl::stats_print::Options::default(); 65 | opts.json_format = true; 66 | let mut buf = Vec::<u8>::new(); 67 | tikv_jemalloc_ctl::stats_print::stats_print(&mut buf, opts).context("failed to print stats")?; 68 | Ok(buf) 69 | } 70 | 71 | pub async fn enable_profiling(enable: bool) -> anyhow::Result<()> { 72 | if !tikv_jemalloc_ctl::profiling::prof::read()? 
{
        return Err(anyhow!("profiling not enabled!"));
    }
    unsafe {
        // `enable` is written straight into the `prof.active` mallctl.
        _ = tikv_jemalloc_ctl::raw::update(PROF_ACTIVE, enable)
            .context("failed to set profiling status")?;
    }
    Ok(())
}

#[cfg(test)]
mod test {
    use crate::*;

    /// True when this jemalloc build reports profiling support.
    fn is_profiling_enabled() -> bool {
        let Ok(enabled) = tikv_jemalloc_ctl::profiling::prof::read() else {
            return false;
        };
        enabled
    }

    // Stats printing should always work, independent of profiling support.
    #[tokio::test]
    async fn test_stats() {
        let out = stats().await.unwrap();
        assert!(!out.is_empty());
    }

    // With profiling support compiled in, toggling and dumping must succeed.
    #[tokio::test]
    async fn test_enabled() {
        if !is_profiling_enabled() {
            println!("Memory profiling disabled, skipping...");
            return;
        }
        enable_profiling(false).await.unwrap();
        dump_profile().await.unwrap();
        enable_profiling(true).await.unwrap();
        dump_profile().await.unwrap();
    }

    // Without profiling support, every profiling entry point must error.
    #[tokio::test]
    async fn test_disabled() {
        if is_profiling_enabled() {
            println!("Memory profiling enabled, skipping...");
            return;
        }
        enable_profiling(false).await.unwrap_err();
        dump_profile().await.unwrap_err();
        enable_profiling(true).await.unwrap_err();
        dump_profile().await.unwrap_err();
    }
}
-------------------------------------------------------------------------------- /third_party/nix/diesel_cli/default.nix: --------------------------------------------------------------------------------
{
  lib,
  stdenv,
  diesel-cli,
  fetchCrate,
  installShellFiles,
  libiconv,
  libmysqlclient,
  nix-update-script,
  openssl,
  pkg-config,
  postgresql,
  rustPlatform,
  sqlite,
  testers,
  zlib,
  sqliteSupport ? true,
  postgresqlSupport ? true,
  mysqlSupport ?
false,
}:
# A CLI with no database backend would be useless; refuse such a configuration.
assert lib.assertMsg (lib.elem true [
  postgresqlSupport
  mysqlSupport
  sqliteSupport
]) "support for at least one database must be enabled";
rustPlatform.buildRustPackage rec {
  pname = "diesel-cli";
  version = "2.2.11";

  src = fetchCrate {
    inherit version;
    crateName = "diesel_cli";
    hash = "sha256-utiIuifPxHjvC0TkY2XLeOlqReaal/4T4hrJ7tmQ27k=";
  };

  cargoHash = "sha256-QHcH0jgBAYtyYJoaBJW92HR5ZBgdMLupe5+l22Wpfjg=";

  nativeBuildInputs = [
    installShellFiles
    pkg-config
  ];

  # Only link against the backends that were actually requested.
  buildInputs =
    [openssl]
    ++ lib.optional sqliteSupport sqlite
    ++ lib.optional postgresqlSupport postgresql
    ++ lib.optionals mysqlSupport [
      libmysqlclient
      zlib
    ];

  # NOTE(review): `verbose` is not a standard buildRustPackage attribute; it
  # only ends up as an env var in the builder — confirm it is still needed.
  verbose = true;

  buildNoDefaultFeatures = true;
  buildFeatures =
    lib.optional sqliteSupport "sqlite"
    ++ lib.optional postgresqlSupport "postgres"
    ++ lib.optional mysqlSupport "mysql";

  RUSTFLAGS = lib.optionalString stdenv.hostPlatform.isLinux "-C linker-features=-lld";

  checkFlags = [
    # all of these require a live database to be running
    # `DATABASE_URL must be set in order to run tests: NotPresent`
    "--skip=infer_schema_internals::information_schema::tests::get_primary_keys_only_includes_primary_key"
    "--skip=infer_schema_internals::information_schema::tests::load_table_names_loads_from_custom_schema"
    "--skip=infer_schema_internals::information_schema::tests::load_table_names_loads_from_public_schema_if_none_given"
    "--skip=infer_schema_internals::information_schema::tests::load_table_names_output_is_ordered"
    "--skip=infer_schema_internals::information_schema::tests::skip_views"
    "--skip=infer_schema_internals::mysql::test::get_table_data_loads_column_information"
    "--skip=infer_schema_internals::mysql::test::gets_table_comment"
    "--skip=infer_schema_internals::pg::test::get_foreign_keys_loads_foreign_keys"
    "--skip=infer_schema_internals::pg::test::get_foreign_keys_loads_foreign_keys_with_same_name"
    "--skip=infer_schema_internals::pg::test::get_table_data_loads_column_information"
    "--skip=infer_schema_internals::pg::test::gets_table_comment"
  ];
  cargoCheckFeatures = buildFeatures;

  # Tests currently fail due to *many* duplicate definition errors
  doCheck = false;

  # Shell completions can only be generated when the built binary is runnable
  # on the build platform (i.e. not when cross-compiling).
  postInstall = lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) ''
    installShellCompletion --cmd diesel \
      --bash <($out/bin/diesel completions bash) \
      --fish <($out/bin/diesel completions fish) \
      --zsh <($out/bin/diesel completions zsh)
  '';

  # Fix the build with mariadb, which otherwise shows "error adding symbols:
  # DSO missing from command line" errors for libz and libssl.
  env.NIX_LDFLAGS = lib.optionalString mysqlSupport "-lz -lssl -lcrypto";

  passthru = {
    tests.version = testers.testVersion {package = diesel-cli;};
    updateScript = nix-update-script {};
  };

  meta = {
    description = "Database tool for working with Rust projects that use Diesel";
    homepage = "https://diesel.rs";
    changelog = "https://github.com/diesel-rs/diesel/releases/tag/v${version}";
    license = with lib.licenses; [
      mit
      asl20
    ];
    maintainers = with lib.maintainers; [getchoo];
    mainProgram = "diesel";
  };
}
--------------------------------------------------------------------------------