├── BUILD
├── .bazelignore
├── examples
├── .bazelignore
├── file.txt
├── strings_golden.txt
├── other_module
│ ├── hello.jsonnet
│ ├── MODULE.bazel
│ └── BUILD.bazel
├── other_module_golden.txt
├── imports
│ ├── d.libsonnet
│ ├── a.libsonnet
│ ├── tier2
│ │ ├── c.libsonnet
│ │ ├── tier3
│ │ │ ├── b.libsonnet
│ │ │ └── BUILD
│ │ └── BUILD
│ └── BUILD
├── strings.jsonnet
├── imports_default_output.jsonnet
├── codefile.libsonnet
├── codefile2.libsonnet
├── invalid.out
├── tla_code_file_input.json
├── extvar_str_golden.json
├── other_module.jsonnet
├── tla_str_files_golden.json
├── extvar_files_generated_golden.json
├── extvar_env_golden.json
├── multiple_outs.jsonnet
├── tla_code_files.jsonnet
├── tlavar_str.jsonnet
├── multiple_outs_nested_asymmetric.jsonnet
├── out_dir.jsonnet
├── tla_code_library.jsonnet
├── extvar_files_golden.json
├── tla_code_files_golden.json
├── tlavar_env.jsonnet
├── yaml_stream_golden.yaml
├── extvar_filegroup_golden.json
├── yaml_stream.jsonnet
├── tools
│ └── stamping
│ │ └── workspace_status.sh
├── extvar_code_library.jsonnet
├── extvar_files.jsonnet
├── imports_golden.json
├── extvar_files_generated.jsonnet
├── extvar_env.jsonnet
├── tla_code_library_golden.json
├── extvar_stamp_golden.json
├── tlavar_stamp.jsonnet
├── extvar_filegroup.jsonnet
├── extvar_str.jsonnet
├── .bazelrc
├── extvar_files_library_golden.json
├── extvar_stamp.jsonnet
├── other_toolchain_module
│ └── MODULE.bazel
├── invalid.jsonnet
├── MODULE.bazel
├── code_library.libsonnet
├── shell-workflows.jsonnet
├── imports.jsonnet
├── workflow.libsonnet
├── intersection_golden.json
├── wordcount_golden.json
├── intersection.jsonnet
├── wordcount.jsonnet
├── shell-workflows_golden.json
└── BUILD
├── CODEOWNERS
├── renovate.json
├── .gitignore
├── docs
├── index.html
└── jsonnet
│ └── jsonnet.html
├── AUTHORS
├── CONTRIBUTORS
├── jsonnet
├── docs.bzl
├── toolchain.bzl
├── BUILD
├── extensions.bzl
├── stamper.py
└── jsonnet.bzl
├── .bazelci
└── presubmit.yml
├── .github
└── workflows
│ ├── release.yml
│ └── create_archive_and_notes.sh
├── CONTRIBUTING.md
├── README.md
├── MODULE.bazel
└── LICENSE
/BUILD:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.bazelignore:
--------------------------------------------------------------------------------
1 | examples
2 |
--------------------------------------------------------------------------------
/examples/.bazelignore:
--------------------------------------------------------------------------------
1 | other_module
2 |
--------------------------------------------------------------------------------
/examples/file.txt:
--------------------------------------------------------------------------------
1 | this is great
2 |
--------------------------------------------------------------------------------
/examples/strings_golden.txt:
--------------------------------------------------------------------------------
1 | "a": 5
2 |
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @sparkprime @Globegitter
2 |
--------------------------------------------------------------------------------
/examples/other_module/hello.jsonnet:
--------------------------------------------------------------------------------
1 | 'hello'
2 |
--------------------------------------------------------------------------------
/examples/other_module_golden.txt:
--------------------------------------------------------------------------------
1 | hello world
2 |
--------------------------------------------------------------------------------
/examples/imports/d.libsonnet:
--------------------------------------------------------------------------------
1 | {
2 | d: 'I am "D."',
3 | }
4 |
--------------------------------------------------------------------------------
/examples/strings.jsonnet:
--------------------------------------------------------------------------------
1 | std.manifestYamlDoc({"a": 5})
2 |
3 |
--------------------------------------------------------------------------------
/examples/imports_default_output.jsonnet:
--------------------------------------------------------------------------------
1 | import 'imports.json'
2 |
--------------------------------------------------------------------------------
/examples/codefile.libsonnet:
--------------------------------------------------------------------------------
1 | {
2 | weather: "Finally spring!"
3 | }
4 |
--------------------------------------------------------------------------------
/examples/codefile2.libsonnet:
--------------------------------------------------------------------------------
1 | {
2 | weather: "Finally summer!"
3 | }
4 |
--------------------------------------------------------------------------------
/examples/invalid.out:
--------------------------------------------------------------------------------
1 | ^RUNTIME ERROR: Foo.*invalid\.jsonnet:15:1-13*
2 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "config:base"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/examples/tla_code_file_input.json:
--------------------------------------------------------------------------------
1 | {
2 | "testvar": "example value"
3 | }
4 |
5 |
--------------------------------------------------------------------------------
/examples/extvar_str_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "mydefine": "sky",
3 | "str": "sun"
4 | }
5 |
--------------------------------------------------------------------------------
/examples/other_module.jsonnet:
--------------------------------------------------------------------------------
1 | (import 'hello.jsonnet') + ' ' + (import 'world.jsonnet')
2 |
--------------------------------------------------------------------------------
/examples/tla_str_files_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "tla_file_contents": "this is great\n"
3 | }
4 |
--------------------------------------------------------------------------------
/examples/extvar_files_generated_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "file1": "{}\n",
3 | "file2": {}
4 | }
--------------------------------------------------------------------------------
/examples/extvar_env_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "my_jsonnet": "some code",
3 | "my_test": "test"
4 | }
5 |
--------------------------------------------------------------------------------
/examples/multiple_outs.jsonnet:
--------------------------------------------------------------------------------
1 | {
2 | 'dir1/file1.json': {},
3 | 'dir2/file2.json': {},
4 | }
5 |
--------------------------------------------------------------------------------
/examples/imports/a.libsonnet:
--------------------------------------------------------------------------------
1 | local b = import 'b.libsonnet';
2 |
3 | b {
4 | a: 'I am "A."',
5 | }
6 |
--------------------------------------------------------------------------------
/examples/tla_code_files.jsonnet:
--------------------------------------------------------------------------------
1 | function(tla_file = {}) {
2 | "tla_file_contents": tla_file,
3 | }
4 |
--------------------------------------------------------------------------------
/examples/tlavar_str.jsonnet:
--------------------------------------------------------------------------------
1 | function(str, mydefine) {
2 | str: str,
3 | mydefine: mydefine,
4 | }
5 |
--------------------------------------------------------------------------------
/examples/imports/tier2/c.libsonnet:
--------------------------------------------------------------------------------
1 | local d = import 'd.libsonnet';
2 |
3 | d {
4 | c: 'I am "C."',
5 | }
6 |
--------------------------------------------------------------------------------
/examples/multiple_outs_nested_asymmetric.jsonnet:
--------------------------------------------------------------------------------
1 | {
2 | 'aaaaa/file.json': {},
3 | 'file.json': {},
4 | }
5 |
--------------------------------------------------------------------------------
/examples/out_dir.jsonnet:
--------------------------------------------------------------------------------
1 | {
2 | 'hello.txt': 'Hello, Bazel!',
3 | 'goodbye.txt': 'Goodbye, Bazel!',
4 | }
5 |
--------------------------------------------------------------------------------
/examples/tla_code_library.jsonnet:
--------------------------------------------------------------------------------
1 | function(tla_code) {
2 | tla_code_contents: tla_code.workflow.Job,
3 | }
4 |
--------------------------------------------------------------------------------
/examples/extvar_files_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "file1": "this is great\n",
3 | "file2": "Finally spring!"
4 | }
5 |
--------------------------------------------------------------------------------
/examples/imports/tier2/tier3/b.libsonnet:
--------------------------------------------------------------------------------
1 | local c = import 'c.libsonnet';
2 |
3 | c {
4 | b: 'I am "B."',
5 | }
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | bazel-*
2 | # These files are not deterministic/stable, so don't check them in for now.
3 | MODULE.bazel.lock
4 |
--------------------------------------------------------------------------------
/examples/tla_code_files_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "tla_file_contents": {
3 | "testvar": "example value"
4 | }
5 | }
6 |
7 |
--------------------------------------------------------------------------------
/examples/tlavar_env.jsonnet:
--------------------------------------------------------------------------------
1 | function(MYTEST, MYJSONNET) {
2 | my_test: MYTEST,
3 | my_jsonnet: MYJSONNET.code,
4 | }
5 |
--------------------------------------------------------------------------------
/examples/yaml_stream_golden.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | .*a.*:.*1.*
3 | .*b.*:.*2.*
4 | ---
5 | .*c.*:.*hello.*
6 | .*d.*:.*world.*
7 | ...
8 |
--------------------------------------------------------------------------------
/examples/extvar_filegroup_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "file1": "this is great\n",
3 | "file2": "Finally spring!",
4 | "file3": "Finally summer!"
5 | }
6 |
--------------------------------------------------------------------------------
/examples/yaml_stream.jsonnet:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | a: 1,
4 | b: 2,
5 | },
6 | {
7 | c: "hello",
8 | d: "world",
9 | },
10 | ]
11 |
--------------------------------------------------------------------------------
/examples/tools/stamping/workspace_status.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "STABLE_K8S_CLUSTER MyCluster"
4 | echo 'COMPLEX_JSON { nested: { secret: "top" } }'
5 |
--------------------------------------------------------------------------------
/examples/extvar_code_library.jsonnet:
--------------------------------------------------------------------------------
1 | local codefile = std.extVar('codefile');
2 |
3 | {
4 | job: codefile.workflow.Job,
5 | shJob: codefile.workflow.ShJob,
6 | }
7 |
--------------------------------------------------------------------------------
/examples/extvar_files.jsonnet:
--------------------------------------------------------------------------------
1 | local test = std.extVar("test");
2 | local codefile = std.extVar("codefile");
3 |
4 | {
5 | file1: test,
6 | file2: codefile.weather,
7 | }
8 |
--------------------------------------------------------------------------------
/examples/imports_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "a": "I am \"A.\"",
3 | "b": "I am \"B.\"",
4 | "c": "I am \"C.\"",
5 | "d": "I am \"D.\"",
6 | "top": "I am the top."
7 | }
8 |
--------------------------------------------------------------------------------
/examples/extvar_files_generated.jsonnet:
--------------------------------------------------------------------------------
1 | local test = std.extVar("test");
2 | local codefile = std.extVar("codefile");
3 |
4 | {
5 | file1: test,
6 | file2: codefile,
7 | }
8 |
--------------------------------------------------------------------------------
/examples/extvar_env.jsonnet:
--------------------------------------------------------------------------------
1 | local my_test = std.extVar("MYTEST");
2 | local my_jsonnet = std.extVar("MYJSONNET");
3 |
4 | {
5 | my_test: my_test,
6 | my_jsonnet: my_jsonnet.code,
7 | }
8 |
--------------------------------------------------------------------------------
/examples/tla_code_library_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "tla_code_contents": {
3 | "deps": [],
4 | "inputs": [],
5 | "outputs": [],
6 | "type": "base"
7 | }
8 | }
9 |
10 |
--------------------------------------------------------------------------------
/examples/extvar_stamp_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "file1": "test",
3 | "mydefine": "sky",
4 | "non_stamp": "non_stamp",
5 | "k8s": "MyCluster",
6 | "complex": { "secret": "top" },
7 | "my_json": { "test": "something" }
8 | }
9 |
--------------------------------------------------------------------------------
/examples/tlavar_stamp.jsonnet:
--------------------------------------------------------------------------------
1 | function(k8s, complex, my_json, mydefine, non_stamp) {
2 | file1: 'test',
3 | mydefine: mydefine,
4 | non_stamp: non_stamp,
5 | k8s: k8s,
6 | complex: complex.nested,
7 | my_json: my_json,
8 | }
9 |
--------------------------------------------------------------------------------
/examples/extvar_filegroup.jsonnet:
--------------------------------------------------------------------------------
1 | local test = std.extVar('1-test');
2 | local codefile = std.extVar('codefile');
3 | local codefile2 = std.extVar('codefile2');
4 | {
5 | file1: test,
6 | file2: codefile.weather,
7 | file3: codefile2.weather,
8 | }
9 |
--------------------------------------------------------------------------------
/examples/extvar_str.jsonnet:
--------------------------------------------------------------------------------
1 | local test = std.extVar("test");
2 | local codefile = std.extVar("codefile");
3 | local str = std.extVar("str");
4 | local mydefine = std.extVar("mydefine");
5 |
6 | {
7 | str: str,
8 | mydefine: mydefine,
9 | }
10 |
--------------------------------------------------------------------------------
/examples/imports/tier2/BUILD:
--------------------------------------------------------------------------------
1 | load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")
2 |
3 | jsonnet_library(
4 | name = "c",
5 | srcs = ["c.libsonnet"],
6 | imports = ["../"],
7 | deps = ["//imports:d"],
8 | visibility = ["//imports/tier2/tier3:__pkg__"],
9 | )
10 |
--------------------------------------------------------------------------------
/examples/imports/tier2/tier3/BUILD:
--------------------------------------------------------------------------------
1 | load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")
2 |
3 | jsonnet_library(
4 | name = "b",
5 | srcs = ["b.libsonnet"],
6 | imports = ["../"],
7 | deps = ["//imports/tier2:c"],
8 | visibility = ["//imports:__pkg__"],
9 | )
10 |
--------------------------------------------------------------------------------
/examples/other_module/MODULE.bazel:
--------------------------------------------------------------------------------
1 | module(
2 | name = "other_module",
3 | version = "0.0.0",
4 | )
5 |
6 | bazel_dep(name = "rules_jsonnet", version = "0.0.0")
7 |
8 | jsonnet = use_extension("@rules_jsonnet//jsonnet:extensions.bzl", "jsonnet")
9 | jsonnet.compiler(name = "rust")
10 |
--------------------------------------------------------------------------------
/examples/.bazelrc:
--------------------------------------------------------------------------------
1 | build --action_env MYTEST="test" --action_env MYJSONNET="{code: 'some code'}"
2 | build --define mydefine="sky"
3 | build --workspace_status_command=tools/stamping/workspace_status.sh
4 | build --verbose_failures
5 | test --test_verbose_timeout_warnings --test_summary=detailed --test_output=all
6 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | rules_jsonnet documentation has moved
5 |
6 |
7 | The documentation for rules_jsonnet can now be found on Bazel Central Registry.
8 |
9 |
10 |
--------------------------------------------------------------------------------
/docs/jsonnet/jsonnet.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | rules_jsonnet documentation has moved
5 |
6 |
7 | The documentation for rules_jsonnet can now be found on Bazel Central Registry.
8 |
9 |
10 |
--------------------------------------------------------------------------------
/examples/extvar_files_library_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "job": {
3 | "deps": [ ],
4 | "inputs": [ ],
5 | "outputs": [ ],
6 | "type": "base"
7 | },
8 | "shJob": {
9 | "command": "",
10 | "deps": [ ],
11 | "inputs": [ ],
12 | "outputs": [ ],
13 | "type": "sh",
14 | "vars": { }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/examples/extvar_stamp.jsonnet:
--------------------------------------------------------------------------------
1 | local k8s = std.extVar("k8s");
2 | local complex = std.extVar("complex");
3 | local my_json = std.extVar("my_json");
4 | local mydefine = std.extVar("mydefine");
5 | local non_stamp = std.extVar("non_stamp");
6 |
7 | {
8 | file1: "test",
9 | mydefine: mydefine,
10 | non_stamp: non_stamp,
11 | k8s: k8s,
12 | complex: complex.nested,
13 | my_json: my_json,
14 | }
15 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | # This is the official list of Bazel authors for copyright purposes.
2 | # This file is distinct from the CONTRIBUTORS files.
3 | # See the latter for an explanation.
4 |
5 | # Names should be added to this file as:
6 | # Name or Organization
7 | # The email address is not required for organizations.
8 |
9 | Google Inc.
10 | Damien Martin-Guillerez
11 | David Chen
12 |
--------------------------------------------------------------------------------
/examples/imports/BUILD:
--------------------------------------------------------------------------------
1 | load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")
2 |
3 | jsonnet_library(
4 | name = "a",
5 | srcs = ["a.libsonnet"],
6 | imports = ["tier2/tier3"],
7 | visibility = ["//:__pkg__"],
8 | deps = ["//imports/tier2/tier3:b"],
9 | )
10 |
11 | jsonnet_library(
12 | name = "d",
13 | srcs = ["d.libsonnet"],
14 | visibility = ["//imports/tier2:__pkg__"],
15 | )
16 |
--------------------------------------------------------------------------------
/examples/other_toolchain_module/MODULE.bazel:
--------------------------------------------------------------------------------
1 | # This module 'other_toolchain_module' is here to test multiple modules
2 | # providing different toolchains and the logic in the toolchain conflict
3 | # resolution logic.
4 |
5 | module(
6 | name = "other_toolchain_module",
7 | version = "0.0.0",
8 | )
9 |
10 | bazel_dep(name = "rules_jsonnet", version = "0.0.0")
11 |
12 | jsonnet = use_extension("@rules_jsonnet//jsonnet:extensions.bzl", "jsonnet")
13 | jsonnet.compiler(name = "rust")
14 |
--------------------------------------------------------------------------------
/examples/other_module/BUILD.bazel:
--------------------------------------------------------------------------------
1 | load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")
2 |
3 | jsonnet_library(
4 | name = "hello",
5 | srcs = ["hello.jsonnet"],
6 | visibility = ["//visibility:public"],
7 | )
8 |
9 | genrule(
10 | name = "world_src",
11 | outs = ["world.jsonnet"],
12 | cmd = "echo \\'world\\' > $@",
13 | )
14 |
15 | jsonnet_library(
16 | name = "world",
17 | srcs = ["world.jsonnet"],
18 | visibility = ["//visibility:public"],
19 | )
20 |
--------------------------------------------------------------------------------
/CONTRIBUTORS:
--------------------------------------------------------------------------------
1 | # People who have agreed to one of the CLAs and can contribute patches.
2 | # The AUTHORS file lists the copyright holders; this file
3 | # lists people. For example, Google employees are listed here
4 | # but not in AUTHORS, because Google holds the copyright.
5 | #
6 | # https://developers.google.com/open-source/cla/individual
7 | # https://developers.google.com/open-source/cla/corporate
8 | #
9 | # Names should be added to this file as:
10 | # Name
11 |
12 | Damien Martin-Guillerez
13 | David Chen
14 | Justine Alexandra Roberts Tunney
15 |
--------------------------------------------------------------------------------
/examples/invalid.jsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | error "Foo."
16 |
--------------------------------------------------------------------------------
/examples/MODULE.bazel:
--------------------------------------------------------------------------------
1 | module(
2 | name = "examples",
3 | version = "0.0.0",
4 | )
5 |
6 | bazel_dep(name = "rules_jsonnet", version = "0.0.0")
7 | local_path_override(
8 | module_name = "rules_jsonnet",
9 | path = "..",
10 | )
11 |
12 | jsonnet = use_extension("@rules_jsonnet//jsonnet:extensions.bzl", "jsonnet")
13 | jsonnet.compiler(name = "go")
14 |
15 | bazel_dep(name = "other_module", version = "0.0.0")
16 | local_path_override(
17 | module_name = "other_module",
18 | path = "other_module",
19 | )
20 |
21 | bazel_dep(name = "other_toolchain_module", version = "0.0.0")
22 | local_path_override(
23 | module_name = "other_toolchain_module",
24 | path = "other_toolchain_module",
25 | )
26 |
--------------------------------------------------------------------------------
/examples/code_library.libsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | // Re-export dependencies for dependency inversion
16 | {
17 | workflow: import 'workflow.libsonnet',
18 | }
19 |
--------------------------------------------------------------------------------
/jsonnet/docs.bzl:
--------------------------------------------------------------------------------
1 | """\
2 | # Jsonnet Rules
3 |
4 | These are build rules for working with [Jsonnet][jsonnet] files with Bazel.
5 |
6 | [jsonnet]: https://jsonnet.org/
7 |
8 | ## Setup
9 |
10 | To use the Jsonnet rules as part of your Bazel project, please follow the
11 | instructions on [the releases page](https://github.com/bazelbuild/rules_jsonnet/releases).
12 | """
13 |
14 | load("//jsonnet:toolchain.bzl", _jsonnet_toolchain = "jsonnet_toolchain")
15 | load(
16 | "//jsonnet:jsonnet.bzl",
17 | _jsonnet_library = "jsonnet_library",
18 | _jsonnet_to_json = "jsonnet_to_json",
19 | _jsonnet_to_json_test = "jsonnet_to_json_test",
20 | )
21 |
22 | jsonnet_toolchain = _jsonnet_toolchain
23 | jsonnet_library = _jsonnet_library
24 | jsonnet_to_json = _jsonnet_to_json
25 | jsonnet_to_json_test = _jsonnet_to_json_test
26 |
--------------------------------------------------------------------------------
/examples/shell-workflows.jsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | local wordcount = import "wordcount.jsonnet";
16 | local intersection = import "intersection.jsonnet";
17 |
18 | {
19 | "wordcount-workflow.json": wordcount,
20 | "intersection-workflow.json": intersection,
21 | }
22 |
--------------------------------------------------------------------------------
/.bazelci/presubmit.yml:
--------------------------------------------------------------------------------
1 | ---
2 | matrix:
3 | platform:
4 | - ubuntu2204
5 | - macos
6 |
7 | tasks:
8 | rules_jsonnet:
9 | name: rules_jsonnet
10 | platform: ${{ platform }}
11 | build_targets:
12 | - //...
13 |
14 | examples_go:
15 | name: examples
16 | platform: ${{ platform }}
17 | working_directory: examples
18 | test_flags:
19 | - "--extra_toolchains=@rules_jsonnet//jsonnet:go_jsonnet_toolchain"
20 | test_targets:
21 | - //...
22 |
23 | examples_cpp:
24 | name: examples
25 | platform: ${{ platform }}
26 | working_directory: examples
27 | test_flags:
28 | - "--extra_toolchains=@rules_jsonnet//jsonnet:cpp_jsonnet_toolchain"
29 | test_targets:
30 | - //...
31 |
32 | examples_rust:
33 | name: examples
34 | platform: ${{ platform }}
35 | working_directory: examples
36 | test_flags:
37 | - "--extra_toolchains=@rules_jsonnet//jsonnet:rust_jsonnet_toolchain"
38 | test_targets:
39 | - //...
40 |
--------------------------------------------------------------------------------
/examples/imports.jsonnet:
--------------------------------------------------------------------------------
1 | local a = import 'a.libsonnet';
2 |
3 | // Verify that the "imports" attribute on the "jsonnet_library,"
4 | // "jsonnet_to_json," and "jsonnet_to_json_test" rules produces
5 | // invocations of the Jsonnet tool that establish proper library search
6 | // directories necessary to locate the imported files.
7 | //
8 | // In this example, we have the following dependency graph:
9 | //
10 | // File name Containing directory Import style
11 | // ========= ==================== ============
12 | // imports.jsonnet ./
13 | // | Down once
14 | // `-> a.libsonnet ./imports
15 | // | Down twice
16 | // `-> b.libsonnet ./imports/tier2/tier3
17 | // | Up once
18 | // `-> c.libsonnet ./imports/tier2
19 | // | Up once
20 | // `-> d.libsonnet ./imports
21 | //
22 | // That is, the import statements jump around within four directories
23 | // to confirm that we can include multiple sibling files in the same
24 | // directory from files located in other directories both higher and
25 | // lower in the tree.
26 |
27 | a {
28 | top: 'I am the top.',
29 | }
30 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2024 The Bazel Authors. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Cut a release whenever a new tag is pushed to the repo.
16 | name: Release
17 |
18 | on:
19 | push:
20 | tags:
21 | - "*.*.*"
22 |
23 | jobs:
24 | build:
25 | runs-on: ubuntu-latest
26 | steps:
27 | - name: Checkout
28 | uses: actions/checkout@v5
29 | - name: Create release archive and notes
30 | run: .github/workflows/create_archive_and_notes.sh
31 | - name: Release
32 | uses: softprops/action-gh-release@v2
33 | with:
34 | # Use GH feature to populate the changelog automatically
35 | generate_release_notes: true
36 | body_path: release_notes.txt
37 | fail_on_unmatched_files: true
38 | files: rules_jsonnet-*.tar.gz
39 |
--------------------------------------------------------------------------------
/examples/workflow.libsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | // Configuration for a hypothetical workflow scheduling system.
16 | {
17 | // Configuration for a workflow.
18 | Workflow:: {
19 | schedule: {},
20 | retries: 5,
21 | jobs: {},
22 | },
23 |
24 | // Scheduling configuration for a workflow.
25 | Schedule:: {
26 | start_date: "",
27 | start_time: "",
28 | repeat_frequency: 0,
29 | repeat_type: "",
30 | },
31 |
32 | // Base configuration for a Job in a workflow.
33 | Job:: {
34 | type: "base",
35 | deps: [],
36 | inputs: [],
37 | outputs: [],
38 | },
39 |
40 | // Configuration for a job that runs a shell command.
41 | ShJob:: self.Job {
42 | type: "sh",
43 | command: "",
44 | vars: {},
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/examples/intersection_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "intersection": {
3 | "jobs": {
4 | "intersect": {
5 | "command": "comm -12 /tmp/list1_sorted /tmp/list2_sorted > /tmp/intersection",
6 | "deps": [
7 | ":sort_file1",
8 | ":sort_file2"
9 | ],
10 | "inputs": [
11 | "/tmp/list1_sorted",
12 | "/tmp/list2_sorted"
13 | ],
14 | "outputs": [
15 | "/tmp/intersection"
16 | ],
17 | "type": "sh",
18 | "vars": { }
19 | },
20 | "sort_file1": {
21 | "command": "sort /tmp/list1 > /tmp/list1_sorted",
22 | "deps": [ ],
23 | "inputs": [
24 | "/tmp/list1"
25 | ],
26 | "outputs": [
27 | "/tmp/list1_sorted"
28 | ],
29 | "type": "sh",
30 | "vars": { }
31 | },
32 | "sort_file2": {
33 | "command": "sort /tmp/list2 > /tmp/list2_sorted",
34 | "deps": [ ],
35 | "inputs": [
36 | "/tmp/list2"
37 | ],
38 | "outputs": [
39 | "/tmp/list2_sorted"
40 | ],
41 | "type": "sh",
42 | "vars": { }
43 | }
44 | },
45 | "retries": 5,
46 | "schedule": { }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/examples/wordcount_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "wordcount": {
3 | "jobs": {
4 | "count": {
5 | "command": "uniq -c /tmp/sorted_tokens > /tmp/counts",
6 | "deps": [
7 | ":sort"
8 | ],
9 | "inputs": [
10 | "/tmp/sorted_tokens"
11 | ],
12 | "outputs": [
13 | "/tmp/counts"
14 | ],
15 | "type": "sh",
16 | "vars": { }
17 | },
18 | "sort": {
19 | "command": "sort /tmp/tokens > /tmp/sorted_tokens",
20 | "deps": [
21 | ":tokenize"
22 | ],
23 | "inputs": [
24 | "/tmp/tokens"
25 | ],
26 | "outputs": [
27 | "/tmp/sorted_tokens"
28 | ],
29 | "type": "sh",
30 | "vars": { }
31 | },
32 | "tokenize": {
33 | "command": "tr ' ' '\n' < /tmp/passage_test > /tmp/tokens",
34 | "deps": [ ],
35 | "inputs": [
36 | "/tmp/passage_test"
37 | ],
38 | "outputs": [
39 | "/tmp/tokens"
40 | ],
41 | "type": "sh",
42 | "vars": { }
43 | }
44 | },
45 | "retries": 12,
46 | "schedule": {
47 | "repeat_frequency": 1,
48 | "repeat_type": "week",
49 | "start_date": "2015-11-15",
50 | "start_time": "17:30"
51 | }
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Want to contribute? Great! First, read this page (including the small print at the end).
2 |
3 | ### Before you contribute
4 | **Before we can use your code, you must sign the
5 | [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual?csw=1)
6 | (CLA)**, which you can do online.
7 |
8 | The CLA is necessary mainly because you own the copyright to your changes,
9 | even after your contribution becomes part of our codebase, so we need your
10 | permission to use and distribute your code. We also need to be sure of
11 | various other things — for instance that you'll tell us if you know that
12 | your code infringes on other people's patents. You don't have to sign
13 | the CLA until after you've submitted your code for review and a member has
14 | approved it, but you must do it before we can put your code into our codebase.
15 |
16 | Before you start working on a larger contribution, you should get in touch
17 | with us first. Use the issue tracker to explain your idea so we can help and
18 | possibly guide you.
19 |
20 | ### Code reviews and other contributions.
21 | **All submissions, including submissions by project members, require review.**
Please follow the instructions in [the contributors documentation](https://bazel.build/contribute).
23 |
24 | ### The small print
25 | Contributions made by corporations are covered by a different agreement than
26 | the one above, the
27 | [Software Grant and Corporate Contributor License Agreement](https://cla.developers.google.com/about/google-corporate).
28 |
--------------------------------------------------------------------------------
/jsonnet/toolchain.bzl:
--------------------------------------------------------------------------------
JsonnetToolchainInfo = provider(
    doc = "Jsonnet toolchain provider",
    fields = {
        "compiler": "The File object of the Jsonnet compiler executable.",
        "create_directory_flags": "The flags to pass when creating a directory.",
        "manifest_file_support": (
            "If the Jsonnet compiler supports writing the output filenames to a " +
            "manifest file."
        ),
    },
)
12 |
def _jsonnet_toolchain_impl(ctx):
    """Wraps the configured compiler attributes in a ToolchainInfo provider."""
    info = JsonnetToolchainInfo(
        compiler = ctx.executable.compiler,
        create_directory_flags = ctx.attr.create_directory_flags,
        manifest_file_support = ctx.attr.manifest_file_support,
    )
    return [platform_common.ToolchainInfo(jsonnetinfo = info)]
22 |
jsonnet_toolchain = rule(
    implementation = _jsonnet_toolchain_impl,
    doc = "The Jsonnet compiler information.",
    attrs = {
        # Made mandatory and documented for consistency with the other attrs;
        # without it a missing compiler only fails later with an opaque error.
        "compiler": attr.label(
            executable = True,
            cfg = "exec",
            mandatory = True,
            doc = "The Jsonnet compiler binary invoked by the rules.",
        ),
        "create_directory_flags": attr.string_list(
            mandatory = True,
            doc = (
                "The flags passed to the Jsonnet compiler when a directory " +
                "must be created."
            ),
        ),
        "manifest_file_support": attr.bool(
            mandatory = True,
            doc = (
                "If the Jsonnet compiler supports writing the output filenames " +
                "to a manifest file."
            ),
        ),
    },
)
47 |
--------------------------------------------------------------------------------
/.github/workflows/create_archive_and_notes.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Copyright 2024 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds the release archive plus release notes for the GitHub release job.

set -o errexit -o nounset -o pipefail

# Set by GH actions, see
# https://docs.github.com/en/actions/learn-github-actions/environment-variables#default-environment-variables
TAG=${GITHUB_REF_NAME}
# A prefix is added to better match the GitHub generated archives.
PREFIX="rules_jsonnet-${TAG}"
ARCHIVE="rules_jsonnet-${TAG}.tar.gz"
# Quote all expansions so an unusual tag name cannot word-split or glob.
git archive --format=tar --prefix="${PREFIX}/" "${TAG}" | gzip > "${ARCHIVE}"

cat > release_notes.txt << EOF
## Setup

To use the Jsonnet rules, add the following to your \`MODULE.bazel\` file:

\`\`\`starlark
bazel_dep(name = "rules_jsonnet", version = "${TAG}")
\`\`\`
EOF

# Add generated API docs to the release
# See https://github.com/bazelbuild/bazel-central-registry/blob/main/docs/stardoc.md
docs="$(mktemp -d)"; targets="$(mktemp)"
bazel --output_base="$docs" query --output=label --output_file="$targets" 'kind("starlark_doc_extract rule", //...)'
bazel --output_base="$docs" build --target_pattern_file="$targets"
tar --create --auto-compress \
    --directory "$(bazel --output_base="$docs" info bazel-bin)" \
    --file "$GITHUB_WORKSPACE/${ARCHIVE%.tar.gz}.docs.tar.gz" .
44 |
--------------------------------------------------------------------------------
/examples/intersection.jsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
local workflow = import "workflow.libsonnet";

// Workflow that performs an intersection of two files using shell commands.
{
  intersection: workflow.Workflow {
    jobs: {
      // File paths shared between the jobs below.
      local input_file1 = "/tmp/list1",
      local input_file2 = "/tmp/list2",
      local sorted_file1 = "/tmp/list1_sorted",
      local sorted_file2 = "/tmp/list2_sorted",
      local intersection = "/tmp/intersection",

      // Reusable template: sorts one input file into an output file.
      SortJob:: workflow.ShJob {
        input_file:: "",
        output_file:: "",
        command: std.format("sort %s > %s", [self.input_file, self.output_file]),
        inputs: [self.input_file],
        outputs: [self.output_file],
      },

      sort_file1: self.SortJob {
        input_file:: input_file1,
        output_file:: sorted_file1,
      },

      sort_file2: self.SortJob {
        input_file:: input_file2,
        output_file:: sorted_file2,
      },

      // comm -12 keeps only the lines present in both sorted inputs.
      intersect: workflow.ShJob {
        deps: [
          ":sort_file1",
          ":sort_file2",
        ],
        command: std.format("comm -12 %s %s > %s",
                            [sorted_file1, sorted_file2, intersection]),
        inputs: [
          sorted_file1,
          sorted_file2,
        ],
        outputs: [intersection],
      },
    },
  },
}
61 |
--------------------------------------------------------------------------------
/jsonnet/BUILD:
--------------------------------------------------------------------------------
load(":toolchain.bzl", "jsonnet_toolchain")
load("@rules_python//python:py_binary.bzl", "py_binary")
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")

# Bazel-provided Starlark sources, wrapped so they can be a bzl_library dep.
bzl_library(
    name = "bzl_srcs",
    srcs = ["@bazel_tools//tools:bzl_srcs"],
)

# All rule sources bundled together for StarDoc documentation generation.
bzl_library(
    name = "docs",
    srcs = [
        "docs.bzl",
        "jsonnet.bzl",
        "toolchain.bzl",
    ],
    visibility = ["//visibility:public"],
    deps = [
        ":bzl_srcs",
        "@bazel_skylib//lib:paths",
        "@bazel_skylib//lib:shell",
    ],
)

starlark_doc_extract(
    name = "docs.extract",
    src = "docs.bzl",
    deps = [":docs"],
)

# Helper binary that expands workspace-status (stamp) variables.
py_binary(
    name = "stamper",
    srcs = ["stamper.py"],
    main = "stamper.py",
    visibility = ["//visibility:public"],
)

toolchain_type(name = "toolchain_type")

# One jsonnet_toolchain per supported compiler implementation.
jsonnet_toolchain(
    name = "rust_jsonnet",
    compiler = "@crates_jsonnet//:jrsonnet__jrsonnet",
    create_directory_flags = ["-c"],
    manifest_file_support = False,
)

jsonnet_toolchain(
    name = "go_jsonnet",
    compiler = "@jsonnet_go//cmd/jsonnet",
    create_directory_flags = ["-c"],
    manifest_file_support = True,
)

jsonnet_toolchain(
    name = "cpp_jsonnet",
    compiler = "@jsonnet//cmd:jsonnet",
    create_directory_flags = [],
    manifest_file_support = True,
)

# Registerable toolchain targets, selected via MODULE.bazel or
# --extra_toolchains.
toolchain(
    name = "rust_jsonnet_toolchain",
    toolchain = ":rust_jsonnet",
    toolchain_type = ":toolchain_type",
    visibility = ["//visibility:public"],
)

toolchain(
    name = "go_jsonnet_toolchain",
    toolchain = ":go_jsonnet",
    toolchain_type = ":toolchain_type",
    visibility = ["//visibility:public"],
)

toolchain(
    name = "cpp_jsonnet_toolchain",
    toolchain = ":cpp_jsonnet",
    toolchain_type = ":toolchain_type",
    visibility = ["//visibility:public"],
)
81 |
--------------------------------------------------------------------------------
/jsonnet/extensions.bzl:
--------------------------------------------------------------------------------
def _get_jsonnet_compiler(module_ctx):
    """Resolves which Jsonnet compiler to use from the module graph.

    Defaults to "go" when no module expressed a preference. The root
    module's choice always wins; otherwise all modules must agree.
    """
    tagged_modules = [
        m
        for m in module_ctx.modules
        if m.tags.compiler
    ]

    # Nobody asked for a specific compiler: use the Go implementation.
    if not tagged_modules:
        return "go"

    for m in tagged_modules:
        if len(m.tags.compiler) != 1:
            fail(
                "Only one compiler can be specified, got: %s" %
                [compiler.name for compiler in m.tags.compiler],
            )

        # The root module's preference overrides everything else.
        if m.is_root:
            return m.tags.compiler[0].name

    # No root preference: every tagged module must request the same compiler.
    compiler_name = tagged_modules[0].tags.compiler[0].name
    for m in tagged_modules:
        if m.tags.compiler[0].name != compiler_name:
            fail(
                "Different compilers specified by different modules, got: %s. " %
                [compiler_name, m.tags.compiler[0].name] +
                "Specify a compiler in the root module to resolve this.",
            )

    return compiler_name
33 |
def _jsonnet_impl(module_ctx):
    """Instantiates the repository that aliases the selected toolchain."""
    compiler = _get_jsonnet_compiler(module_ctx)
    _jsonnet_toolchain_repo(
        name = "rules_jsonnet_toolchain",
        compiler = compiler,
    )
39 |
# Module extension that lets Bazel modules vote on a Jsonnet compiler.
jsonnet = module_extension(
    implementation = _jsonnet_impl,
    tag_classes = {
        # jsonnet.compiler(name = "go" | "cpp" | "rust") selects a compiler.
        "compiler": tag_class(
            attrs = {
                "name": attr.string(),
            },
        ),
    },
)
50 |
def _jsonnet_toolchain_repo_impl(ctx):
    """Writes a BUILD file that aliases the selected compiler's toolchain."""
    build_content = """
alias(
    name = "toolchain",
    actual = "@rules_jsonnet//jsonnet:%s_jsonnet_toolchain",
)
""" % ctx.attr.compiler
    ctx.file(
        "BUILD.bazel",
        content = build_content,
        executable = False,
    )
62 |
# Repository rule backing the extension; generates the alias repository.
_jsonnet_toolchain_repo = repository_rule(
    implementation = _jsonnet_toolchain_repo_impl,
    attrs = {
        # Name of the selected compiler: "go", "cpp", or "rust".
        "compiler": attr.string(),
    },
)
69 |
--------------------------------------------------------------------------------
/jsonnet/stamper.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 The Bazel Authors. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | """Resolve stamp variables."""
15 |
16 | import argparse
17 | import sys
18 |
parser = argparse.ArgumentParser(description='Resolve stamp references.')

# Both --format and --output are required: without them the script previously
# crashed with an AttributeError instead of a usage error.
parser.add_argument('--format', action='store', required=True,
                    help='The format string containing stamp variables.')

parser.add_argument('--output', action='store', required=True,
                    help='The filename into which we write the result.')

parser.add_argument('--stamp-info-file', action='append', required=False,
                    help=('A list of files from which to read substitutions '
                          'to make in the provided --format, e.g. {BUILD_USER}'))


def _read_stamp_values(filenames):
    """Parses "KEY value" lines from the given workspace-status files.

    Args:
      filenames: iterable of paths to stamp info files; each non-empty line
        must be of the form "KEY value".

    Returns:
      dict mapping each key to its value; the last occurrence of a
      duplicated key wins (a warning is printed).

    Raises:
      Exception: if a non-empty line has no space separator.
    """
    values = {}
    for infofile in filenames:
        with open(infofile) as info:
            for line in info:
                line = line.strip('\n')
                if not line:
                    continue
                elts = line.split(' ', 1)
                if len(elts) != 2:
                    raise Exception('Malformed line: %s' % line)
                (key, value) = elts
                if key in values:
                    print('WARNING: Duplicate value for key "%s": '
                          'using "%s"' % (key, value))
                values[key] = value
    return values


def main():
    """Expands {KEY} references in --format and writes the result to --output."""
    args = parser.parse_args()
    format_args = _read_stamp_values(args.stamp_info_file or [])
    with open(args.output, 'w') as f:
        f.write(args.format.format(**format_args))


if __name__ == '__main__':
    main()
56 |
--------------------------------------------------------------------------------
/examples/wordcount.jsonnet:
--------------------------------------------------------------------------------
1 | // Copyright 2015 The Bazel Authors. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
local workflow = import "workflow.libsonnet";

// Workflow that performs a wordcount using shell commands.
{
  wordcount: workflow.Workflow {
    retries: 12,
    // Runs weekly, starting 2015-11-15 at 17:30.
    schedule: workflow.Schedule {
      start_date: "2015-11-15",
      start_time: "17:30",
      repeat_frequency: 1,
      repeat_type: "week",
    },
    jobs: {
      // Intermediate file paths threaded between the jobs.
      local input_file = "/tmp/passage_test",
      local tokens_file = "/tmp/tokens",
      local sorted_tokens_file = "/tmp/sorted_tokens",
      local counts_file = "/tmp/counts",

      // Splits the input into one word per line.
      tokenize: workflow.ShJob {
        command: std.format("tr ' ' '\n' < %s > %s", [input_file, tokens_file]),
        inputs: [input_file],
        outputs: [tokens_file],
      },

      // Sorts the token list so duplicate words become adjacent.
      sort: workflow.ShJob {
        deps: [":tokenize"],
        command: std.format("sort %s > %s", [tokens_file, sorted_tokens_file]),
        inputs: [tokens_file],
        outputs: [sorted_tokens_file],
      },

      // Collapses adjacent duplicates into per-word counts.
      count: workflow.ShJob {
        deps: [":sort"],
        command: std.format("uniq -c %s > %s", [sorted_tokens_file, counts_file]),
        inputs: [sorted_tokens_file],
        outputs: [counts_file],
      },
    },
  },
}
60 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://buildkite.com/bazel/rules-jsonnet-postsubmit)
2 |
3 | # Jsonnet Rules
4 |
5 |
13 |
14 | ## Overview
15 |
16 | These are build rules for working with [Jsonnet][jsonnet] files with Bazel.
17 |
18 | [jsonnet]: https://jsonnet.org
19 |
20 | ## Setup
21 |
22 | To use the Jsonnet rules as part of your Bazel project, please follow the
23 | instructions on [the releases page](https://github.com/bazelbuild/rules_jsonnet/releases).
24 |
25 | ## Jsonnet Compiler Selection
26 |
27 | By default for Bzlmod, Bazel will use the [Go
28 | compiler](https://github.com/google/go-jsonnet). Note that the
29 | primary development focus of the Jsonnet project is now with the Go compiler.
30 | This repository's support for using the C++ compiler is deprecated, and may be
31 | removed in a future release.
32 |
33 | To use [the
34 | C++](https://github.com/google/jsonnet) or
35 | [Rust](https://github.com/CertainLach/jrsonnet) compiler of Jsonnet instead,
36 | register a different compiler:
37 |
38 | | Jsonnet compiler | MODULE.bazel directive |
39 | | ---------------- | --------------------------------- |
40 | | Go | `jsonnet.compiler(name = "go")` |
| C++              | `jsonnet.compiler(name = "cpp")`  |
42 | | Rust | `jsonnet.compiler(name = "rust")` |
43 |
44 | ### CLI
45 |
Use the `--extra_toolchains` flag to pass the preferred toolchain to the Bazel
invocation:
48 |
49 | ```bash
50 | bazel build //... --extra_toolchains=@rules_jsonnet//jsonnet:cpp_jsonnet_toolchain
51 |
52 | bazel test //... --extra_toolchains=@rules_jsonnet//jsonnet:rust_jsonnet_toolchain
53 |
54 | bazel run //... --extra_toolchains=@rules_jsonnet//jsonnet:go_jsonnet_toolchain
55 | ```
56 |
57 | ## Rule usage
58 |
59 | Please refer to [the StarDoc generated documentation](docs/jsonnet.md)
60 | for instructions on how to use these rules.
61 |
--------------------------------------------------------------------------------
/MODULE.bazel:
--------------------------------------------------------------------------------
module(
    name = "rules_jsonnet",
    version = "0.7.2",
)

bazel_dep(name = "bazel_skylib", version = "1.8.1")
bazel_dep(name = "jsonnet", version = "0.21.0")
bazel_dep(name = "jsonnet_go", version = "0.21.0")
bazel_dep(name = "rules_python", version = "1.7.0")
bazel_dep(name = "rules_rust", version = "0.68.1")

# Resolve the compiler choice from the module graph and register the
# corresponding toolchain alias repository.
jsonnet = use_extension("//jsonnet:extensions.bzl", "jsonnet")
use_repo(jsonnet, "rules_jsonnet_toolchain")

register_toolchains("@rules_jsonnet_toolchain//:toolchain")

# Pinned nightly Rust host tools, needed to build the Jrsonnet compiler.
rust_host = use_extension("@rules_rust//rust:extensions.bzl", "rust_host_tools")
rust_host.host_tools(
    name = "rust_host_tools_jsonnet",
    sha256s = {
        "2025-07-01/rustc-nightly-aarch64-apple-darwin.tar.xz": "b5fb4b5272fea4d4ef6e3896e484e9748fda4f29be428ae3a55c22f70566b54c",
        "2025-07-01/clippy-nightly-aarch64-apple-darwin.tar.xz": "a0715713220f6cb56031a86c91de7a26d1f89d149afc2e01af625a89ca63f673",
        "2025-07-01/cargo-nightly-aarch64-apple-darwin.tar.xz": "4eb240f69df9f9159c6fef128c3b7c24e5e1ae8aaf1357de4924fd518bd54941",
        "2025-07-01/llvm-tools-nightly-aarch64-apple-darwin.tar.xz": "ab8d9977ba3187819008b70dab317654bb290fc220cfea35b4f2ea165ce32e70",
        "2025-07-01/rust-std-nightly-aarch64-apple-darwin.tar.xz": "da6c8e6f256bb6512485db068d35109a3c77ccac678bc28134665cd1b547863b",
    },
    version = "nightly/2025-07-01",
)
use_repo(rust_host, "rust_host_tools_jsonnet")

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
    edition = "2021",
    versions = ["1.88.0"],
)
use_repo(rust, "rust_toolchains")

register_toolchains("@rust_toolchains//:all")

crate = use_extension("@rules_rust//crate_universe:extension.bzl", "crate")
crate.spec(
    # Binary artifacts can't be depended upon without specifically marking the
    # artifact as `bin`.
    artifact = "bin",
    package = "jrsonnet",
    version = "0.5.0-pre95",
)

# Required for rules_rust to generate binary targets for the Jrsonnet crate.
crate.annotation(
    crate = "jrsonnet",
    gen_binaries = ["jrsonnet"],
)
crate.from_specs(
    name = "crates_jsonnet",
    host_tools = "@rust_host_tools_jsonnet",
)
use_repo(crate, "crates_jsonnet")
59 |
--------------------------------------------------------------------------------
/examples/shell-workflows_golden.json:
--------------------------------------------------------------------------------
1 | {
2 | "intersection-workflow.json": {
3 | "intersection": {
4 | "jobs": {
5 | "intersect": {
6 | "command": "comm -12 /tmp/list1_sorted /tmp/list2_sorted > /tmp/intersection",
7 | "deps": [
8 | ":sort_file1",
9 | ":sort_file2"
10 | ],
11 | "inputs": [
12 | "/tmp/list1_sorted",
13 | "/tmp/list2_sorted"
14 | ],
15 | "outputs": [
16 | "/tmp/intersection"
17 | ],
18 | "type": "sh",
19 | "vars": { }
20 | },
21 | "sort_file1": {
22 | "command": "sort /tmp/list1 > /tmp/list1_sorted",
23 | "deps": [ ],
24 | "inputs": [
25 | "/tmp/list1"
26 | ],
27 | "outputs": [
28 | "/tmp/list1_sorted"
29 | ],
30 | "type": "sh",
31 | "vars": { }
32 | },
33 | "sort_file2": {
34 | "command": "sort /tmp/list2 > /tmp/list2_sorted",
35 | "deps": [ ],
36 | "inputs": [
37 | "/tmp/list2"
38 | ],
39 | "outputs": [
40 | "/tmp/list2_sorted"
41 | ],
42 | "type": "sh",
43 | "vars": { }
44 | }
45 | },
46 | "retries": 5,
47 | "schedule": { }
48 | }
49 | },
50 | "wordcount-workflow.json": {
51 | "wordcount": {
52 | "jobs": {
53 | "count": {
54 | "command": "uniq -c /tmp/sorted_tokens > /tmp/counts",
55 | "deps": [
56 | ":sort"
57 | ],
58 | "inputs": [
59 | "/tmp/sorted_tokens"
60 | ],
61 | "outputs": [
62 | "/tmp/counts"
63 | ],
64 | "type": "sh",
65 | "vars": { }
66 | },
67 | "sort": {
68 | "command": "sort /tmp/tokens > /tmp/sorted_tokens",
69 | "deps": [
70 | ":tokenize"
71 | ],
72 | "inputs": [
73 | "/tmp/tokens"
74 | ],
75 | "outputs": [
76 | "/tmp/sorted_tokens"
77 | ],
78 | "type": "sh",
79 | "vars": { }
80 | },
81 | "tokenize": {
82 | "command": "tr ' ' '\n' < /tmp/passage_test > /tmp/tokens",
83 | "deps": [ ],
84 | "inputs": [
85 | "/tmp/passage_test"
86 | ],
87 | "outputs": [
88 | "/tmp/tokens"
89 | ],
90 | "type": "sh",
91 | "vars": { }
92 | }
93 | },
94 | "retries": 12,
95 | "schedule": {
96 | "repeat_frequency": 1,
97 | "repeat_type": "week",
98 | "start_date": "2015-11-15",
99 | "start_time": "17:30"
100 | }
101 | }
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/examples/BUILD:
--------------------------------------------------------------------------------
load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library", "jsonnet_to_json", "jsonnet_to_json_test")

package(default_visibility = ["//visibility:public"])

# This directory contains unit and regression tests that also serve as examples
# for jsonnet rules. The BUILD rules should not contain any jsonnet_to_json
# rules as this is redundant with jsonnet_to_json_test rules.

jsonnet_library(
    name = "workflow",
    srcs = ["workflow.libsonnet"],
)

jsonnet_to_json_test(
    name = "wordcount_test",
    src = "wordcount.jsonnet",
    golden = "wordcount_golden.json",
    deps = [":workflow"],
)

jsonnet_to_json_test(
    name = "intersection_test",
    src = "intersection.jsonnet",
    golden = "intersection_golden.json",
    deps = [":workflow"],
)

jsonnet_library(
    name = "shell-workflows-lib",
    srcs = [
        "intersection.jsonnet",
        "wordcount.jsonnet",
    ],
    deps = [":workflow"],
)

jsonnet_to_json_test(
    name = "shell-workflows",
    src = "shell-workflows.jsonnet",
    golden = "shell-workflows_golden.json",
    deps = [":shell-workflows-lib"],
)

# Compilation is expected to fail; the stderr output is matched as a regex
# against the golden file.
jsonnet_to_json_test(
    name = "invalid_test",
    src = "invalid.jsonnet",
    error = 1,
    golden = "invalid.out",
    regex = 1,
)

# Compilation is expected to fail; no golden output is compared.
jsonnet_to_json_test(
    name = "invalid_failed_test",
    src = "invalid.jsonnet",
    error = 1,
)

# External-variable (--ext-*) coverage.
jsonnet_to_json_test(
    name = "extvar_env_test",
    size = "small",
    src = "extvar_env.jsonnet",
    ext_code_envs = ["MYJSONNET"],
    ext_str_envs = ["MYTEST"],
    golden = "extvar_env_golden.json",
)

jsonnet_to_json_test(
    name = "extvar_str_test",
    size = "small",
    src = "extvar_str.jsonnet",
    ext_strs = {
        "str": "sun",
        "mydefine": "$(mydefine)",
    },
    golden = "extvar_str_golden.json",
)

# Top-level-argument (--tla-*) counterparts reuse the extvar goldens.
jsonnet_to_json_test(
    name = "tlavar_env_test",
    size = "small",
    src = "tlavar_env.jsonnet",
    golden = "extvar_env_golden.json",
    tla_code_envs = ["MYJSONNET"],
    tla_str_envs = ["MYTEST"],
)

jsonnet_to_json_test(
    name = "tlavar_str_test",
    size = "small",
    src = "tlavar_str.jsonnet",
    golden = "extvar_str_golden.json",
    tla_strs = {
        "str": "sun",
        "mydefine": "$(mydefine)",
    },
)

# Uses the :generated genrule output as both a str and a code ext file.
jsonnet_to_json_test(
    name = "extvar_files_generated_test",
    size = "small",
    src = "extvar_files_generated.jsonnet",
    ext_code_file_vars = ["codefile"],
    ext_code_files = [":generated.jsonnet"],
    ext_str_file_vars = ["test"],
    ext_str_files = [":generated.jsonnet"],
    golden = "extvar_files_generated_golden.json",
)

jsonnet_to_json_test(
    name = "extvar_files_test",
    size = "small",
    src = "extvar_files.jsonnet",
    ext_code_file_vars = ["codefile"],
    ext_code_files = [":codefile.libsonnet"],
    ext_str_file_vars = ["test"],
    ext_str_files = [":file.txt"],
    golden = "extvar_files_golden.json",
)
119 |
# Library handed to tests via ext_code_libraries / tla_code_libraries.
jsonnet_library(
    name = "code_library_lib",
    srcs = ["code_library.libsonnet"],
    deps = [":workflow"],
)
125 |
# Dict literals reformatted to the buildifier style used by the rest of
# this file (no spaces inside braces).
jsonnet_to_json_test(
    name = "extvar_code_library_test",
    size = "small",
    src = "extvar_code_library.jsonnet",
    ext_code_libraries = {":code_library_lib": "codefile"},
    golden = "extvar_files_library_golden.json",
)

jsonnet_to_json_test(
    name = "tla_code_library_test",
    size = "small",
    src = "tla_code_library.jsonnet",
    golden = "tla_code_library_golden.json",
    tla_code_libraries = {":code_library_lib": "tla_code"},
)
141 |
142 | jsonnet_to_json_test(
143 | name = "tla_code_files_test",
144 | size = "small",
145 | src = "tla_code_files.jsonnet",
146 | golden = "tla_code_files_golden.json",
147 | tla_code_files = {
148 | "tla_code_file_input.json": "tla_file",
149 | },
150 | )
151 |
152 | jsonnet_to_json_test(
153 | name = "tla_str_files_test",
154 | size = "small",
155 | src = "tla_code_files.jsonnet",
156 | golden = "tla_str_files_golden.json",
157 | tla_str_files = {
158 | "file.txt": "tla_file",
159 | },
160 | )
161 |
162 | filegroup(
163 | name = "test_str_files",
164 | srcs = ["file.txt"],
165 | )
166 |
167 | filegroup(
168 | name = "test_code_files",
169 | srcs = [
170 | "codefile.libsonnet",
171 | "codefile2.libsonnet",
172 | ],
173 | )
174 |
175 | jsonnet_to_json_test(
176 | name = "extvar_files_test_filegroup",
177 | size = "small",
178 | src = "extvar_filegroup.jsonnet",
179 | ext_code_file_vars = [
180 | "codefile",
181 | "codefile2",
182 | ],
183 | ext_code_files = [":test_code_files"],
184 | ext_str_file_vars = ["1-test"],
185 | ext_str_files = [":test_str_files"],
186 | golden = "extvar_filegroup_golden.json",
187 | )
188 |
189 | jsonnet_to_json_test(
190 | name = "generated_src_test",
191 | src = ":generated",
192 | golden = ":generated",
193 | )
194 |
195 | genrule(
196 | name = "generated",
197 | testonly = 1,
198 | outs = ["generated.jsonnet"],
199 | cmd = "echo {} > $@",
200 | )
201 |
202 | jsonnet_to_json_test(
203 | name = "extvar_stamp_test",
204 | size = "small",
205 | src = "extvar_stamp.jsonnet",
206 | ext_code = {
207 | "complex": "{COMPLEX_JSON}",
208 | "my_json": "{test: 'something'}",
209 | },
210 | ext_strs = {
211 | "non_stamp": "non_stamp",
212 | "mydefine": "$(mydefine)",
213 | "k8s": "{STABLE_K8S_CLUSTER}",
214 | },
215 | golden = "extvar_stamp_golden.json",
216 | stamp_keys = [
217 | "k8s",
218 | "complex",
219 | ],
220 | )
221 |
222 | jsonnet_to_json_test(
223 | name = "tlavar_stamp_test",
224 | size = "small",
225 | src = "tlavar_stamp.jsonnet",
226 | golden = "extvar_stamp_golden.json",
227 | stamp_keys = [
228 | "k8s",
229 | "complex",
230 | ],
231 | tla_code = {
232 | "complex": "{COMPLEX_JSON}",
233 | "my_json": "{test: 'something'}",
234 | },
235 | tla_strs = {
236 | "non_stamp": "non_stamp",
237 | "mydefine": "$(mydefine)",
238 | "k8s": "{STABLE_K8S_CLUSTER}",
239 | },
240 | )
241 |
242 | jsonnet_to_json_test(
243 | name = "yaml_stream_test",
244 | src = "yaml_stream.jsonnet",
245 | golden = "yaml_stream_golden.yaml",
246 | regex = 1,
247 | yaml_stream = 1,
248 | )
249 |
# Build-rule variant of the imports scenario: compiles imports.jsonnet with an
# extra -J search path ("imports") and a jsonnet_library dependency.
jsonnet_to_json(
    name = "imports_build",
    src = "imports.jsonnet",
    outs = ["imports.json"],
    imports = ["imports"],
    deps = ["//imports:a"],
)

# Test-rule variant of the same imports scenario, diffed against a golden file.
jsonnet_to_json_test(
    name = "imports_test",
    src = "imports.jsonnet",
    golden = "imports_golden.json",
    imports = ["imports"],
    deps = ["//imports:a"],
)

# Wraps the default output of the imports_build rule as a library, so the JSON
# produced by one rule can be imported by another.
jsonnet_library(
    name = "imports_default_output",
    srcs = [":imports_build"],
)

# Verifies that a jsonnet_to_json default output consumed via jsonnet_library
# (imports_default_output) produces the same result as the original source.
jsonnet_to_json_test(
    name = "imports_default_output_test",
    src = "imports_default_output.jsonnet",
    golden = "imports_golden.json",
    imports = ["imports"],
    deps = [":imports_default_output"],
)
278 |
# Plain-string output (--string): the golden file is raw text, so golden
# canonicalization through the jsonnet compiler is disabled.
jsonnet_to_json_test(
    name = "strings_test",
    src = "strings.jsonnet",
    canonicalize_golden = False,
    extra_args = ["--string"],
    golden = "strings_golden.txt",
)

# Verifies the output_file_contents = False path (failure output suppressed).
jsonnet_to_json_test(
    name = "output_file_contents_smoke_test",
    src = "wordcount.jsonnet",
    golden = "wordcount_golden.json",
    output_file_contents = False,
)

# Consumes jsonnet_library targets from an external bzlmod module
# (@other_module), exercising cross-repository -J import paths.
jsonnet_to_json_test(
    name = "other_module_test",
    src = "other_module.jsonnet",
    canonicalize_golden = False,
    extra_args = ["--string"],
    golden = "other_module_golden.txt",
    deps = [
        "@other_module//:hello",
        "@other_module//:world",
    ],
)
306 |
# Directory output: jsonnet runs in multi-output mode (-m) and writes all
# produced files into a declared tree artifact.
jsonnet_to_json(
    name = "out_dir",
    src = "out_dir.jsonnet",
    out_dir = "out_dir.output",
)

# Multiple declared outputs at the top level; the rule infers the shared
# output directory from the outs' common path prefix.
jsonnet_to_json(
    name = "multiple_outs_toplevel",
    src = "multiple_outs.jsonnet",
    outs = [
        "dir1/file1.json",
        "dir2/file2.json",
    ],
)

# As above, with the outputs nested one level deeper ("nested/...").
jsonnet_to_json(
    name = "multiple_outs_nested",
    src = "multiple_outs.jsonnet",
    outs = [
        "nested/dir1/file1.json",
        "nested/dir2/file2.json",
    ],
)

# Asymmetric nesting: one output sits deeper than the other, exercising the
# common-prefix computation when directory depths differ.
jsonnet_to_json(
    name = "multiple_outs_nested_asymmetric",
    src = "multiple_outs_nested_asymmetric.jsonnet",
    outs = [
        "multiple_outs_nested_asymmetric/aaaaa/file.json",
        "multiple_outs_nested_asymmetric/file.json",
    ],
)
339 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/jsonnet/jsonnet.bzl:
--------------------------------------------------------------------------------
1 | # Copyright 2015 The Bazel Authors. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | load("@bazel_skylib//lib:paths.bzl", "paths")
15 | load("@bazel_skylib//lib:shell.bzl", "shell")
16 |
# Source file extensions accepted by the jsonnet rules.
_JSONNET_FILETYPE = [
    ".jsonnet",
    ".libsonnet",
    ".json",
]

# Provider propagated by jsonnet_library so that dependent rules can collect
# transitive sources and the -J import flags needed to compile against them.
JsonnetLibraryInfo = provider(
    fields = {
        "imports": "Depset of Strings containing import flags set by transitive dependency targets.",
        "short_imports": "Depset of Strings containing import flags set by transitive dependency targets, when invoking Jsonnet as part of a test where dependencies are stored in runfiles.",
        "transitive_jsonnet_files": "Depset of Files containing sources of transitive dependencies",
    },
)
30 |
def _get_import_paths(label, files, imports, short_path):
    """Computes the -J import paths for a target's sources.

    Args:
        label: The Label of the target being compiled.
        files: List of source Files the paths are computed for.
        imports: List of additional import directories (relative to the
            target's package) from the `imports` attribute.
        short_path: If True, build runfiles-relative (short) paths for use
            in tests; otherwise build execroot paths.

    Returns:
        A list of import path strings: one implicit workspace-root entry per
        file, followed by one entry per (file, import) pair.
    """

    # TODO: Is there a cleaner way to compute the short paths here?
    # In runfiles, external repositories live under ../<repo> rather than
    # external/<repo>, and there is no output-root prefix to prepend.
    workspace_root = label.workspace_root.replace("external/", "../") if short_path else label.workspace_root

    result = []

    # Implicitly add the workspace root as an import path.
    for file in files:
        output_root = "" if short_path else file.root.path
        result.append(paths.join(".", output_root, workspace_root))

    # Explicitly provided import paths.
    for file in files:
        output_root = "" if short_path else file.root.path
        for im in imports:
            result.append(paths.join(".", output_root, workspace_root, label.package, im))

    return result
53 |
def _setup_deps(deps, tla_code_libraries = {}, ext_code_libraries = {}):
    """Collects source files and import flags of transitive dependencies.

    Args:
        deps: List of deps labels from ctx.attr.deps.
        tla_code_libraries: Dict mapping jsonnet_library labels to top-level
            argument names, from ctx.attr.tla_code_libraries.
        ext_code_libraries: Dict mapping jsonnet_library labels to ext var
            names, from ctx.attr.ext_code_libraries.

    Returns:
        Returns a struct containing the following fields:
            transitive_sources: Depset of Files containing sources of transitive
                dependencies
            imports: Depset of Strings containing import flags set by transitive
                dependency targets.
            short_imports: Depset of Strings containing import flags set by
                transitive dependency targets, when invoking Jsonnet as part
                of a test where dependencies are stored in runfiles.
    """
    transitive_sources = []
    imports = []
    short_imports = []
    for dep in deps:
        transitive_sources.append(dep[JsonnetLibraryInfo].transitive_jsonnet_files)
        imports.append(dep[JsonnetLibraryInfo].imports)
        short_imports.append(dep[JsonnetLibraryInfo].short_imports)

    # Only the label keys of the library dicts carry sources; the values are
    # the variable names the libraries are bound to on the command line.
    for code_file in tla_code_libraries.keys() + ext_code_libraries.keys():
        transitive_sources.append(code_file[JsonnetLibraryInfo].transitive_jsonnet_files)
        imports.append(code_file[JsonnetLibraryInfo].imports)
        short_imports.append(code_file[JsonnetLibraryInfo].short_imports)

    return struct(
        imports = depset(transitive = imports),
        short_imports = depset(transitive = short_imports),
        transitive_sources = depset(transitive = transitive_sources, order = "postorder"),
    )
90 |
def _jsonnet_library_impl(ctx):
    """Implementation of the jsonnet_library rule.

    Forwards this target's sources plus the transitive sources and import
    flags of its deps via JsonnetLibraryInfo.
    """
    dep_info = _setup_deps(ctx.attr.deps)
    srcs = ctx.files.srcs

    all_sources = depset(srcs, transitive = [dep_info.transitive_sources])
    import_flags = depset(
        _get_import_paths(ctx.label, srcs, ctx.attr.imports, False),
        transitive = [dep_info.imports],
    )
    short_import_flags = depset(
        _get_import_paths(ctx.label, srcs, ctx.attr.imports, True),
        transitive = [dep_info.short_imports],
    )

    # Merge the default runfiles of data, deps and srcs targets with our own
    # data files so tests that consume this library see everything it needs.
    dep_runfiles = []
    for attr in (ctx.attr.data, ctx.attr.deps, ctx.attr.srcs):
        for target in attr:
            dep_runfiles.append(target[DefaultInfo].default_runfiles)

    return [
        DefaultInfo(
            files = depset(srcs),
            runfiles = ctx.runfiles(files = ctx.files.data).merge_all(dep_runfiles),
        ),
        JsonnetLibraryInfo(
            imports = import_flags,
            short_imports = short_import_flags,
            transitive_jsonnet_files = all_sources,
        ),
    ]
121 |
def _quote(s):
    """Wraps s in double quotes, escaping any embedded double quotes."""
    escaped = s.replace('"', '\\"')
    return '"{}"'.format(escaped)
124 |
def _stamp_resolve(ctx, string, output):
    """Registers an action that expands workspace-status placeholders.

    Runs the stamper tool over `string`, substituting {KEY} references from
    the workspace status files (ctx.info_file, ctx.version_file), and writes
    the expanded result to `output`.
    """
    status_files = [ctx.info_file, ctx.version_file]

    args = [
        "--format=%s" % string,
        "--output=%s" % output.path,
    ]
    for status_file in status_files:
        args.append("--stamp-info-file=%s" % status_file.path)

    ctx.actions.run(
        mnemonic = "Stamp",
        executable = ctx.executable._stamper,
        tools = [ctx.executable._stamper],
        arguments = args,
        inputs = status_files,
        outputs = [output],
    )
142 |
def _make_resolve(ctx, val):
    """Expands a "$(name)" make-variable reference through ctx.var.

    Values not shaped like "$(...)" are returned unchanged. An unknown
    variable name is a lookup error, matching ctx.var's dict semantics.
    """
    if val.startswith("$(") and val.endswith(")"):
        return ctx.var[val[2:-1]]
    return val
148 |
def _make_stamp_resolve(ext_vars, ctx, relative = True):
    """Resolves make variables and stamp placeholders in ext_vars values.

    Args:
        ext_vars: Dict of variable names to raw values.
        ctx: The rule context.
        relative: If True, reference stamp files by runfiles short_path
            (for tests); otherwise by execroot path.

    Returns:
        A (resolved_dict, stamp_files) tuple: the dict with all values
        expanded, and the list of stamp files declared along the way.
    """
    resolved = {}
    stamp_files = []
    stamp_keys = ctx.attr.stamp_keys

    for key, raw_value in ext_vars.items():
        # Expand "$(name)" make variables first.
        value = _make_resolve(ctx, raw_value)

        # Keys listed in stamp_keys get their value from the workspace status
        # files at action time: substitute a shell command that reads the
        # stamped value out of the generated file.
        if stamp_keys and key in stamp_keys:
            stamp_file = ctx.actions.declare_file(ctx.label.name + ".jsonnet_" + key)
            _stamp_resolve(ctx, value, stamp_file)
            stamp_path = stamp_file.short_path if relative else stamp_file.path
            value = "$(cat %s)" % stamp_path
            stamp_files.append(stamp_file)

        resolved[key] = value

    return resolved, stamp_files
170 |
def _jsonnet_to_json_impl(ctx):
    """Implementation of the jsonnet_to_json rule.

    Assembles a single shell command invoking the toolchain's jsonnet
    compiler with all import paths, ext vars and top-level arguments, then
    registers one action producing either a single JSON file, multiple
    declared outputs (-m), or an output directory (out_dir).
    """

    if ctx.attr.vars:
        print("'vars' attribute is deprecated, please use 'ext_strs'.")
    if ctx.attr.code_vars:
        print("'code_vars' attribute is deprecated, please use 'ext_code'.")

    # The deprecated 'vars'/'code_vars' attributes are honored as fallbacks
    # for 'ext_strs'/'ext_code'.
    jsonnet_ext_strs = ctx.attr.ext_strs or ctx.attr.vars
    jsonnet_ext_str_envs = ctx.attr.ext_str_envs
    jsonnet_ext_code = ctx.attr.ext_code or ctx.attr.code_vars
    jsonnet_ext_code_envs = ctx.attr.ext_code_envs
    jsonnet_ext_str_files = ctx.files.ext_str_files
    jsonnet_ext_str_file_vars = ctx.attr.ext_str_file_vars
    jsonnet_ext_code_files = ctx.files.ext_code_files
    jsonnet_ext_code_file_vars = ctx.attr.ext_code_file_vars
    jsonnet_ext_code_libraries = ctx.attr.ext_code_libraries
    jsonnet_tla_strs = ctx.attr.tla_strs
    jsonnet_tla_str_envs = ctx.attr.tla_str_envs
    jsonnet_tla_code = ctx.attr.tla_code
    jsonnet_tla_code_envs = ctx.attr.tla_code_envs
    jsonnet_tla_str_files = ctx.attr.tla_str_files
    jsonnet_tla_code_files = ctx.attr.tla_code_files
    jsonnet_tla_code_libraries = ctx.attr.tla_code_libraries

    depinfo = _setup_deps(ctx.attr.deps, jsonnet_tla_code_libraries, jsonnet_ext_code_libraries)

    # Resolve $(make) variables and workspace-status stamps. Bug fix: resolve
    # the merged dicts computed above (which include the deprecated
    # 'vars'/'code_vars' fallbacks) instead of re-reading
    # ctx.attr.ext_strs/ext_code, which silently dropped the fallback values.
    jsonnet_ext_strs, strs_stamp_inputs = _make_stamp_resolve(jsonnet_ext_strs, ctx, False)
    jsonnet_ext_code, code_stamp_inputs = _make_stamp_resolve(jsonnet_ext_code, ctx, False)
    jsonnet_tla_strs, tla_strs_stamp_inputs = _make_stamp_resolve(jsonnet_tla_strs, ctx, False)
    jsonnet_tla_code, tla_code_stamp_inputs = _make_stamp_resolve(jsonnet_tla_code, ctx, False)
    stamp_inputs = strs_stamp_inputs + code_stamp_inputs + tla_strs_stamp_inputs + tla_code_stamp_inputs

    if len(jsonnet_ext_str_file_vars) != len(jsonnet_ext_str_files):
        fail("Mismatch of ext_str_file_vars ({}) to ext_str_files ({})".format(jsonnet_ext_str_file_vars, jsonnet_ext_str_files))

    if len(jsonnet_ext_code_file_vars) != len(jsonnet_ext_code_files):
        fail("Mismatch of ext_code_file_vars ({}) to ext_code_files ({})".format(jsonnet_ext_code_file_vars, jsonnet_ext_code_files))

    if ctx.attr.stamp_keys and not stamp_inputs:
        fail("Stamping requested but found no stamp variable to resolve for.")

    other_args = ctx.attr.extra_args + (["-y"] if ctx.attr.yaml_stream else [])

    command = (
        [
            "set -e;",
            ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.compiler.path,
        ] +
        ["-J " + shell.quote(im) for im in _get_import_paths(ctx.label, [ctx.file.src], ctx.attr.imports, False)] +
        ["-J " + shell.quote(im) for im in depinfo.imports.to_list()] +
        other_args +
        ["--ext-str %s=%s" %
         (_quote(key), _quote(val)) for key, val in jsonnet_ext_strs.items()] +
        ["--ext-str '%s'" %
         ext_str_env for ext_str_env in jsonnet_ext_str_envs] +
        ["--ext-code %s=%s" %
         (_quote(key), _quote(val)) for key, val in jsonnet_ext_code.items()] +
        ["--ext-code %s" %
         ext_code_env for ext_code_env in jsonnet_ext_code_envs] +
        ["--ext-str-file %s=%s" %
         (var, jfile.path) for var, jfile in zip(jsonnet_ext_str_file_vars, jsonnet_ext_str_files)] +
        ["--ext-code-file %s=%s" %
         (var, jfile.path) for var, jfile in zip(jsonnet_ext_code_file_vars, jsonnet_ext_code_files)] +
        ["--ext-code-file %s=%s" %
         (_quote(val), _quote(key[DefaultInfo].files.to_list()[0].path)) for key, val in jsonnet_ext_code_libraries.items()] +
        ["--tla-str %s=%s" %
         (_quote(key), _quote(val)) for key, val in jsonnet_tla_strs.items()] +
        ["--tla-str '%s'" %
         tla_str_env for tla_str_env in jsonnet_tla_str_envs] +
        ["--tla-code %s=%s" %
         (_quote(key), _quote(val)) for key, val in jsonnet_tla_code.items()] +
        ["--tla-code %s" %
         tla_code_env for tla_code_env in jsonnet_tla_code_envs] +
        ["--tla-str-file %s=%s" %
         (var, jfile.files.to_list()[0].path) for jfile, var in jsonnet_tla_str_files.items()] +
        ["--tla-code-file %s=%s" %
         (var, jfile.files.to_list()[0].path) for jfile, var in jsonnet_tla_code_files.items()] +
        ["--tla-code-file %s=%s" %
         (_quote(val), _quote(key[DefaultInfo].files.to_list()[0].path)) for key, val in jsonnet_tla_code_libraries.items()]
    )

    outputs = []

    if (ctx.attr.outs or ctx.attr.multiple_outputs) and ctx.attr.out_dir:
        fail("The \"outs\" and \"multiple_outputs\" attributes are " +
             "incompatible with \"out_dir\".")

    # If multiple_outputs or out_dir is set, then jsonnet will be
    # invoked with the -m flag for multiple outputs. Otherwise, jsonnet
    # will write the resulting JSON to stdout, which is redirected into
    # a single JSON output file.
    if ctx.attr.out_dir:
        if ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.manifest_file_support:
            output_manifest = ctx.actions.declare_file("_%s_outs.mf" % ctx.label.name)
            outputs.append(output_manifest)
            command += ["-o", output_manifest.path]

        out_dir = ctx.actions.declare_directory(ctx.attr.out_dir)
        outputs.append(out_dir)
        command += [ctx.file.src.path, "-m", out_dir.path]
        command += ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.create_directory_flags
    elif len(ctx.attr.outs) > 1 or ctx.attr.multiple_outputs:
        # Assume that the output directory is the leading part of the
        # directory name that is shared by all output files.
        base_dirname = ctx.outputs.outs[0].dirname.split("/")
        for output in ctx.outputs.outs[1:]:
            component_pairs = zip(base_dirname, output.dirname.split("/"))
            base_dirname = base_dirname[:len(component_pairs)]
            for i, (part1, part2) in enumerate(component_pairs):
                if part1 != part2:
                    base_dirname = base_dirname[:i]
                    break
        if ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.manifest_file_support:
            output_manifest = ctx.actions.declare_file("_%s_outs.mf" % ctx.label.name)
            outputs.append(output_manifest)
            command += ["-o", output_manifest.path]

        outputs += ctx.outputs.outs
        command += ["-m", "/".join(base_dirname), ctx.file.src.path]
        command += ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.create_directory_flags
    else:
        # NOTE: an `elif len(ctx.attr.outs) > 1: fail(...)` branch used to sit
        # here, but it was unreachable (the condition is subsumed by the
        # multi-output branch above), so it has been removed.
        compiled_json = ctx.outputs.outs[0]
        outputs.append(compiled_json)
        command += [ctx.file.src.path, "-o", compiled_json.path]

    transitive_data = depset(transitive = [dep.data_runfiles.files for dep in ctx.attr.deps] +
                                          [l.files for l in jsonnet_tla_code_files.keys()] +
                                          [l.files for l in jsonnet_tla_str_files.keys()])
    # NB(sparkprime): (1) transitive_data is never used, since runfiles is only
    # used when .files is pulled from it. (2) This makes sense - jsonnet does
    # not need transitive dependencies to be passed on the commandline. It
    # needs the -J but that is handled separately.

    files = jsonnet_ext_str_files + jsonnet_ext_code_files

    runfiles = ctx.runfiles(
        collect_data = True,
        files = files,
        transitive_files = transitive_data,
    )

    compile_inputs = (
        [ctx.file.src] +
        runfiles.files.to_list() +
        depinfo.transitive_sources.to_list()
    )

    ctx.actions.run_shell(
        inputs = compile_inputs + stamp_inputs,
        toolchain = Label("//jsonnet:toolchain_type"),
        tools = [ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.compiler],
        outputs = outputs,
        mnemonic = "Jsonnet",
        command = " ".join(command),
        use_default_shell_env = True,
        progress_message = "Compiling Jsonnet to JSON for " + ctx.label.name,
    )

    # For out_dir the declared directory is the sole output artifact.
    if ctx.attr.out_dir:
        return [DefaultInfo(
            files = depset([out_dir]),
            runfiles = ctx.runfiles(files = [out_dir]),
        )]

    return [DefaultInfo(
        files = depset(outputs),
        runfiles = ctx.runfiles(files = outputs),
    )]
343 |
# Shell fragment appended after the jsonnet invocation in the generated test
# script: compares the compiler's exit code with the expected one.
# Interpolated (in order): expected exit code (%d), target name, and the
# output_file_contents flag ("true"/"false") gating the output dump.
_EXIT_CODE_COMPARE_COMMAND = """
EXIT_CODE=$?
EXPECTED_EXIT_CODE=%d
if [ $EXIT_CODE -ne $EXPECTED_EXIT_CODE ] ; then
  echo "FAIL (exit code): %s"
  echo "Expected: $EXPECTED_EXIT_CODE"
  echo "Actual: $EXIT_CODE"
  if [ %s = true ]; then
    echo "Output: $OUTPUT"
  fi
  exit 1
fi
"""

# Exact-match golden comparison: the golden file is read (optionally
# canonicalized through the jsonnet compiler) and diffed against $OUTPUT.
# Interpolated (in order): dump command, golden path, target name, and the
# output_file_contents flag.
_DIFF_COMMAND = """
GOLDEN=$(%s %s)
if [ "$OUTPUT" != "$GOLDEN" ]; then
  echo "FAIL (output mismatch): %s"
  echo "Diff:"
  diff -u <(echo "$GOLDEN") <(echo "$OUTPUT")
  if [ %s = true ]; then
    echo "Expected: $GOLDEN"
    echo "Actual: $OUTPUT"
  fi
  exit 1
fi
"""

# Regex golden comparison (used when regex = 1): the golden file is treated
# as a bash regex matched case-insensitively against $OUTPUT.
# Interpolated (in order): dump command, golden path, target name, and the
# output_file_contents flag.
_REGEX_DIFF_COMMAND = """
# Needed due to rust-jsonnet, go-jsonnet and cpp-jsonnet producing different
# output (casing, text etc).
shopt -s nocasematch

GOLDEN_REGEX=$(%s %s)
if [[ ! "$OUTPUT" =~ $GOLDEN_REGEX ]]; then
  echo "FAIL (regex mismatch): %s"
  if [ %s = true ]; then
    echo "Output: $OUTPUT"
  fi
  exit 1
fi
"""
386 |
387 | def _jsonnet_to_json_test_impl(ctx):
388 | """Implementation of the jsonnet_to_json_test rule."""
389 | depinfo = _setup_deps(ctx.attr.deps, ctx.attr.tla_code_libraries, ctx.attr.ext_code_libraries)
390 |
391 | golden_files = []
392 | diff_command = ""
393 | if ctx.file.golden:
394 | golden_files.append(ctx.file.golden)
395 |
396 | # Note that we only run jsonnet to canonicalize the golden output if the
397 | # expected return code is 0, and canonicalize_golden was not explicitly disabled.
398 | # Otherwise, the golden file contains the
399 | # expected error output.
400 |
401 | # For legacy reasons, we also disable canonicalize_golden for yaml_streams.
402 | canonicalize = ctx.attr.canonicalize_golden and not ctx.attr.yaml_stream
403 | dump_golden_cmd = (ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.compiler.short_path if ctx.attr.error == 0 and canonicalize else "/bin/cat")
404 | if ctx.attr.regex:
405 | diff_command = _REGEX_DIFF_COMMAND % (
406 | dump_golden_cmd,
407 | ctx.file.golden.short_path,
408 | ctx.label.name,
409 | "true" if ctx.attr.output_file_contents else "false",
410 | )
411 | else:
412 | diff_command = _DIFF_COMMAND % (
413 | dump_golden_cmd,
414 | ctx.file.golden.short_path,
415 | ctx.label.name,
416 | "true" if ctx.attr.output_file_contents else "false",
417 | )
418 |
419 | jsonnet_ext_str_envs = ctx.attr.ext_str_envs
420 | jsonnet_ext_code_envs = ctx.attr.ext_code_envs
421 | jsonnet_ext_str_files = ctx.files.ext_str_files
422 | jsonnet_ext_str_file_vars = ctx.attr.ext_str_file_vars
423 | jsonnet_ext_code_files = ctx.files.ext_code_files
424 | jsonnet_ext_code_file_vars = ctx.attr.ext_code_file_vars
425 | jsonnet_ext_code_libraries = ctx.attr.ext_code_libraries
426 | jsonnet_tla_str_envs = ctx.attr.tla_str_envs
427 | jsonnet_tla_code_envs = ctx.attr.tla_code_envs
428 | jsonnet_tla_str_files = ctx.attr.tla_str_files
429 | jsonnet_tla_code_files = ctx.attr.tla_code_files
430 | jsonnet_tla_code_libraries = ctx.attr.tla_code_libraries
431 |
432 | jsonnet_ext_strs, strs_stamp_inputs = _make_stamp_resolve(ctx.attr.ext_strs, ctx, True)
433 | jsonnet_ext_code, code_stamp_inputs = _make_stamp_resolve(ctx.attr.ext_code, ctx, True)
434 | jsonnet_tla_strs, tla_strs_stamp_inputs = _make_stamp_resolve(ctx.attr.tla_strs, ctx, True)
435 | jsonnet_tla_code, tla_code_stamp_inputs = _make_stamp_resolve(ctx.attr.tla_code, ctx, True)
436 | stamp_inputs = strs_stamp_inputs + code_stamp_inputs + tla_strs_stamp_inputs + tla_code_stamp_inputs
437 |
438 | if len(jsonnet_ext_str_file_vars) != len(jsonnet_ext_str_files):
439 | fail("Mismatch of ext_str_file_vars ({}) to ext_str_files ({})".format(jsonnet_ext_str_file_vars, jsonnet_ext_str_files))
440 |
441 | if len(jsonnet_ext_code_file_vars) != len(jsonnet_ext_code_files):
442 | fail("Mismatch of ext_code_file_vars ({}) to ext_code_files ({})".format(jsonnet_ext_code_file_vars, jsonnet_ext_code_files))
443 |
444 | other_args = ctx.attr.extra_args + (["-y"] if ctx.attr.yaml_stream else [])
445 | jsonnet_command = " ".join(
446 | ["OUTPUT=$(%s" % ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.compiler.short_path] +
447 | ["-J " + shell.quote(im) for im in _get_import_paths(ctx.label, [ctx.file.src], ctx.attr.imports, True)] +
448 | ["-J " + shell.quote(im) for im in depinfo.short_imports.to_list()] +
449 | other_args +
450 | ["--ext-str %s=%s" %
451 | (_quote(key), _quote(val)) for key, val in jsonnet_ext_strs.items()] +
452 | ["--ext-str %s" %
453 | ext_str_env for ext_str_env in jsonnet_ext_str_envs] +
454 | ["--ext-code %s=%s" %
455 | (_quote(key), _quote(val)) for key, val in jsonnet_ext_code.items()] +
456 | ["--ext-code %s" %
457 | ext_code_env for ext_code_env in jsonnet_ext_code_envs] +
458 | ["--ext-str-file %s=%s" %
459 | (var, jfile.short_path) for var, jfile in zip(jsonnet_ext_str_file_vars, jsonnet_ext_str_files)] +
460 | ["--ext-code-file %s=%s" %
461 | (var, jfile.short_path) for var, jfile in zip(jsonnet_ext_code_file_vars, jsonnet_ext_code_files)] +
462 | ["--ext-code-file %s=%s" %
463 | (_quote(val), _quote(key[DefaultInfo].files.to_list()[0].short_path)) for key, val in jsonnet_ext_code_libraries.items()] +
464 | ["--tla-str %s=%s" %
465 | (_quote(key), _quote(val)) for key, val in jsonnet_tla_strs.items()] +
466 | ["--tla-str '%s'" %
467 | tla_str_env for tla_str_env in jsonnet_tla_str_envs] +
468 | ["--tla-code %s=%s" %
469 | (_quote(key), _quote(val)) for key, val in jsonnet_tla_code.items()] +
470 | ["--tla-code %s" %
471 | tla_code_env for tla_code_env in jsonnet_tla_code_envs] +
472 | ["--tla-str-file %s=%s" %
473 | (var, jfile.files.to_list()[0].short_path) for jfile, var in jsonnet_tla_str_files.items()] +
474 | ["--tla-code-file %s=%s" %
475 | (var, jfile.files.to_list()[0].short_path) for jfile, var in jsonnet_tla_code_files.items()] +
476 | ["--tla-code-file %s=%s" %
477 | (_quote(val), _quote(key[DefaultInfo].files.to_list()[0].short_path)) for key, val in jsonnet_tla_code_libraries.items()] +
478 | [
479 | ctx.file.src.short_path,
480 | "2>&1)",
481 | ],
482 | )
483 |
484 | command = [
485 | "#!/bin/bash",
486 | jsonnet_command,
487 | _EXIT_CODE_COMPARE_COMMAND % (
488 | ctx.attr.error,
489 | ctx.label.name,
490 | "true" if ctx.attr.output_file_contents else "false",
491 | ),
492 | ]
493 | if diff_command:
494 | command.append(diff_command)
495 |
496 | ctx.actions.write(
497 | output = ctx.outputs.executable,
498 | content = "\n".join(command),
499 | is_executable = True,
500 | )
501 |
502 | transitive_data = depset(
503 | transitive = [dep.data_runfiles.files for dep in ctx.attr.deps] +
504 | [l.files for l in jsonnet_tla_code_files.keys()] +
505 | [l.files for l in jsonnet_tla_str_files.keys()],
506 | )
507 |
508 | test_inputs = (
509 | [
510 | ctx.file.src,
511 | ctx.toolchains["//jsonnet:toolchain_type"].jsonnetinfo.compiler,
512 | ] +
513 | golden_files +
514 | transitive_data.to_list() +
515 | depinfo.transitive_sources.to_list() +
516 | jsonnet_ext_str_files +
517 | jsonnet_ext_code_files +
518 | stamp_inputs
519 | )
520 |
521 | return [DefaultInfo(
522 | runfiles = ctx.runfiles(
523 | files = test_inputs,
524 | transitive_files = transitive_data,
525 | collect_data = True,
526 | ),
527 | )]
528 |
# Attributes shared by every rule in this file (libraries, compilation,
# and tests).
_jsonnet_common_attrs = {
    "data": attr.label_list(
        doc = "List of files needed by the Jsonnet sources at runtime.",
        allow_files = True,
    ),
    "imports": attr.string_list(
        doc = "List of import `-J` flags to be passed to the `jsonnet` compiler.",
    ),
    "deps": attr.label_list(
        doc = "List of targets that are required by the `srcs` Jsonnet files.",
        providers = [JsonnetLibraryInfo],
        allow_files = False,
    ),
}
542 |
# Attributes specific to jsonnet_library.
_jsonnet_library_attrs = {
    "srcs": attr.label_list(
        doc = "List of `.jsonnet` files that comprise this Jsonnet library",
        allow_files = _JSONNET_FILETYPE,
    ),
}
549 |
# Public rule: bundles .jsonnet/.libsonnet sources so other Jsonnet targets
# can depend on them via `deps` and import them by path.
jsonnet_library = rule(
    implementation = _jsonnet_library_impl,
    # Merge the library-specific attrs with the attrs shared by all rules.
    attrs = dict(_jsonnet_library_attrs.items() +
                 _jsonnet_common_attrs.items()),
    doc = """Creates a logical set of Jsonnet files.

Example:
  Suppose you have the following directory structure:

  ```
  [workspace]/
      MODULE.bazel
      configs/
          BUILD
          backend.jsonnet
          frontend.jsonnet
  ```

  You can use the `jsonnet_library` rule to build a collection of `.jsonnet`
  files that can be imported by other `.jsonnet` files as dependencies:

  `configs/BUILD`:

  ```python
  load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")

  jsonnet_library(
      name = "configs",
      srcs = [
          "backend.jsonnet",
          "frontend.jsonnet",
      ],
  )
  ```
""",
)
586 |
# Attributes shared by jsonnet_to_json and jsonnet_to_json_test; they map
# directly to the interpreter's --ext-* and --tla-* flags.
_jsonnet_compile_attrs = {
    "src": attr.label(
        doc = "The `.jsonnet` file to convert to JSON.",
        allow_single_file = _JSONNET_FILETYPE,
    ),
    "code_vars": attr.string_dict(
        doc = "Deprecated (use 'ext_code').",
    ),
    "ext_code": attr.string_dict(
        doc = "Map of names to Jsonnet code, passed as `--ext-code name=code`.",
    ),
    "ext_code_envs": attr.string_list(
        doc = "Environment variable names passed as `--ext-code name`; the interpreter reads the value from the environment.",
    ),
    "ext_code_file_vars": attr.string_list(
        doc = "Variable names for `ext_code_files`, matched by position; both lists must have the same length.",
    ),
    "ext_code_files": attr.label_list(
        doc = "Files passed as `--ext-code-file name=file`, with names taken from `ext_code_file_vars`.",
        allow_files = True,
    ),
    "ext_code_libraries": attr.label_keyed_string_dict(
        doc = "Include jsonnet_library as an extvar with the key value",
        providers = [JsonnetLibraryInfo],
    ),
    "ext_str_envs": attr.string_list(
        doc = "Environment variable names passed as `--ext-str name`; the interpreter reads the value from the environment.",
    ),
    "ext_str_file_vars": attr.string_list(
        doc = "Variable names for `ext_str_files`, matched by position; both lists must have the same length.",
    ),
    "ext_str_files": attr.label_list(
        doc = "Files passed as `--ext-str-file name=file`, with names taken from `ext_str_file_vars`.",
        allow_files = True,
    ),
    "ext_strs": attr.string_dict(
        doc = "Map of names to string values, passed as `--ext-str name=value`.",
    ),
    "tla_code": attr.string_dict(
        doc = "Map of argument names to Jsonnet code, passed as `--tla-code name=code`.",
    ),
    "tla_code_envs": attr.string_list(
        doc = "Environment variable names passed as `--tla-code name`; the interpreter reads the value from the environment.",
    ),
    "tla_strs": attr.string_dict(
        doc = "Map of argument names to string values, passed as `--tla-str name=value`.",
    ),
    "tla_str_envs": attr.string_list(
        doc = "Environment variable names passed as `--tla-str name`; the interpreter reads the value from the environment.",
    ),
    "tla_str_files": attr.label_keyed_string_dict(
        doc = "Map of files to argument names, passed as `--tla-str-file name=file`.",
        allow_files = True,
    ),
    "tla_code_files": attr.label_keyed_string_dict(
        doc = "Map of files to argument names, passed as `--tla-code-file name=file`.",
        allow_files = True,
    ),
    "tla_code_libraries": attr.label_keyed_string_dict(
        doc = "Include jsonnet_library as a top-level argument as the given value",
        providers = [JsonnetLibraryInfo],
    ),
    "stamp_keys": attr.string_list(
        doc = "Keys in `ext_strs`/`ext_code`/`tla_strs`/`tla_code` whose values are resolved against workspace status (stamping) information.",
        default = [],
        mandatory = False,
    ),
    "yaml_stream": attr.bool(
        doc = "Set to True to produce a YAML stream instead of JSON (passes `-y` to the interpreter).",
        default = False,
        mandatory = False,
    ),
    "extra_args": attr.string_list(
        doc = """Additional command line arguments for the Jsonnet interpreter.

For example, setting this argument to `["--string"]` causes the interpreter to
manifest the output file(s) as plain text instead of JSON.
""",
    ),
    "vars": attr.string_dict(
        doc = "Deprecated (use 'ext_strs').",
    ),
    "_stamper": attr.label(
        doc = "Tool used to substitute workspace status values for `stamp_keys`.",
        default = Label("//jsonnet:stamper"),
        cfg = "exec",
        executable = True,
        allow_files = True,
    ),
}
646 |
# Attributes specific to jsonnet_to_json, controlling how outputs are
# declared: explicit `outs`, multiple-file mode (`-m`), or a single `out_dir`
# tree artifact. `outs`/`multiple_outputs` and `out_dir` are mutually
# exclusive.
_jsonnet_to_json_attrs = {
    "outs": attr.output_list(
        doc = """\
Names of the output `.json` files to be generated by this rule.

If you are generating only a single JSON file and are not using jsonnet
multiple output files, then this attribute should only contain the file
name of the JSON file you are generating.

If you are generating multiple JSON files using jsonnet multiple file output
(`jsonnet -m`), then list the file names of all the JSON files to be
generated. The file names specified here must match the file names
specified in your `src` Jsonnet file.

For the case where multiple file output is used but only for generating one
output file, set the `multiple_outputs` attribute to 1 to explicitly enable
the `-m` flag for multiple file output.

This attribute is incompatible with `out_dir`.
""",
    ),
    "multiple_outputs": attr.bool(
        doc = """\
Set to `True` to explicitly enable multiple file output via the `jsonnet -m` flag.

This is used for the case where multiple file output is used but only for
generating a single output file. For example:

```
local foo = import "foo.jsonnet";

{
  "foo.json": foo,
}
```

This attribute is incompatible with `out_dir`.
""",
    ),
    "out_dir": attr.string(
        doc = """\
Name of the directory where output files are stored.

If the names of output files are not known up front, this option can be
used to write all output files to a single directory artifact. Files in
this directory cannot be referenced individually.

This attribute is incompatible with `outs` and `multiple_outputs`.
""",
    ),
}
698 |
# Public rule: compiles one .jsonnet source (plus its library deps) to one or
# more JSON/YAML outputs by invoking the toolchain's jsonnet interpreter.
jsonnet_to_json = rule(
    # Name the implementation argument for consistency with jsonnet_library.
    implementation = _jsonnet_to_json_impl,
    attrs = dict(_jsonnet_compile_attrs.items() +
                 _jsonnet_to_json_attrs.items() +
                 _jsonnet_common_attrs.items()),
    toolchains = ["//jsonnet:toolchain_type"],
    doc = """\
Compiles Jsonnet code to JSON.

Example:
  Suppose you have the following directory structure:

  ```
  [workspace]/
      MODULE.bazel
      workflows/
          BUILD
          workflow.libsonnet
          wordcount.jsonnet
          intersection.jsonnet
  ```

  Say that `workflow.libsonnet` is a base configuration library for a workflow
  scheduling system and `wordcount.jsonnet` and `intersection.jsonnet` both
  import `workflow.libsonnet` to define workflows for performing a wordcount and
  intersection of two files, respectively.

  First, create a `jsonnet_library` target with `workflow.libsonnet`:

  `workflows/BUILD`:

  ```python
  load("@rules_jsonnet//jsonnet:jsonnet.bzl", "jsonnet_library")

  jsonnet_library(
      name = "workflow",
      srcs = ["workflow.libsonnet"],
  )
  ```

  To compile `wordcount.jsonnet` and `intersection.jsonnet` to JSON, define two
  `jsonnet_to_json` targets:

  ```python
  jsonnet_to_json(
      name = "wordcount",
      src = "wordcount.jsonnet",
      outs = ["wordcount.json"],
      deps = [":workflow"],
  )

  jsonnet_to_json(
      name = "intersection",
      src = "intersection.jsonnet",
      outs = ["intersection.json"],
      deps = [":workflow"],
  )
  ```

  ### Example: Multiple output files

  To use Jsonnet's [multiple output files][multiple-output-files], suppose you
  add a file `shell-workflows.jsonnet` that imports `wordcount.jsonnet` and
  `intersection.jsonnet`:

  `workflows/shell-workflows.jsonnet`:

  ```
  local wordcount = import "workflows/wordcount.jsonnet";
  local intersection = import "workflows/intersection.jsonnet";

  {
    "wordcount-workflow.json": wordcount,
    "intersection-workflow.json": intersection,
  }
  ```

  To compile `shell-workflows.jsonnet` into the two JSON files,
  `wordcount-workflow.json` and `intersection-workflow.json`, first create a
  `jsonnet_library` target containing the two files that
  `shell-workflows.jsonnet` depends on:

  ```python
  jsonnet_library(
      name = "shell-workflows-lib",
      srcs = [
          "wordcount.jsonnet",
          "intersection.jsonnet",
      ],
      deps = [":workflow"],
  )
  ```

  Then, create a `jsonnet_to_json` target and set `outs` to the list of output
  files to indicate that multiple output JSON files are generated:

  ```python
  jsonnet_to_json(
      name = "shell-workflows",
      src = "shell-workflows.jsonnet",
      deps = [":shell-workflows-lib"],
      outs = [
          "wordcount-workflow.json",
          "intersection-workflow.json",
      ],
  )
  ```

  [multiple-output-files]: https://jsonnet.org/learning/getting_started.html#multi
""",
)
812 |
# Attributes specific to jsonnet_to_json_test, controlling how the produced
# output (or error output) is compared against expectations.
_jsonnet_to_json_test_attrs = {
    "error": attr.int(
        doc = "The expected error code from running `jsonnet` on `src`.",
    ),
    "golden": attr.label(
        doc = (
            "The expected (combined stdout and stderr) output to compare to the " +
            "output of running `jsonnet` on `src`."
        ),
        allow_single_file = True,
    ),
    "regex": attr.bool(
        doc = (
            "Set to True if `golden` contains a regex used to match the output of " +
            "running `jsonnet` on `src`."
        ),
    ),
    "canonicalize_golden": attr.bool(
        doc = "Whether to canonicalize the golden file before comparing it to the output.",
        default = True,
    ),
    "output_file_contents": attr.bool(
        doc = "Whether to print the compared output contents when the test fails.",
        default = True,
    ),
}
833 |
# Public test rule: compiles a .jsonnet source and compares the result (or the
# error output and exit code) against a golden file.
jsonnet_to_json_test = rule(
    # Name the implementation argument for consistency with jsonnet_library.
    implementation = _jsonnet_to_json_test_impl,
    attrs = dict(_jsonnet_compile_attrs.items() +
                 _jsonnet_to_json_test_attrs.items() +
                 _jsonnet_common_attrs.items()),
    toolchains = ["//jsonnet:toolchain_type"],
    executable = True,
    test = True,
    doc = """\
Compiles Jsonnet code to JSON and checks the output.

Example:
  Suppose you have the following directory structure:

  ```
  [workspace]/
      MODULE.bazel
      config/
          BUILD
          base_config.libsonnet
          test_config.jsonnet
          test_config.json
  ```

  Suppose that `base_config.libsonnet` is a library Jsonnet file, containing the
  base configuration for a service. Suppose that `test_config.jsonnet` is a test
  configuration file that is used to test `base_config.jsonnet`, and
  `test_config.json` is the expected JSON output from compiling
  `test_config.jsonnet`.

  The `jsonnet_to_json_test` rule can be used to verify that compiling a Jsonnet
  file produces the expected JSON output. Simply define a `jsonnet_to_json_test`
  target and provide the input test Jsonnet file and the `golden` file containing
  the expected JSON output:

  `config/BUILD`:

  ```python
  load(
      "@rules_jsonnet//jsonnet:jsonnet.bzl",
      "jsonnet_library",
      "jsonnet_to_json_test",
  )

  jsonnet_library(
      name = "base_config",
      srcs = ["base_config.libsonnet"],
  )

  jsonnet_to_json_test(
      name = "test_config_test",
      src = "test_config.jsonnet",
      deps = [":base_config"],
      golden = "test_config.json",
  )
  ```

  To run the test: `bazel test //config:test_config_test`

  ### Example: Negative tests

  Suppose you have the following directory structure:

  ```
  [workspace]/
      MODULE.bazel
      config/
          BUILD
          base_config.libsonnet
          invalid_config.jsonnet
          invalid_config.output
  ```

  Suppose that `invalid_config.jsonnet` is a Jsonnet file used to verify that
  an invalid config triggers an assertion in `base_config.jsonnet`, and
  `invalid_config.output` is the expected error output.

  The `jsonnet_to_json_test` rule can be used to verify that compiling a Jsonnet
  file results in an expected error code and error output. Simply define a
  `jsonnet_to_json_test` target and provide the input test Jsonnet file, the
  expected error code in the `error` attribute, and the `golden` file containing
  the expected error output:

  `config/BUILD`:

  ```python
  load(
      "@rules_jsonnet//jsonnet:jsonnet.bzl",
      "jsonnet_library",
      "jsonnet_to_json_test",
  )

  jsonnet_library(
      name = "base_config",
      srcs = ["base_config.libsonnet"],
  )

  jsonnet_to_json_test(
      name = "invalid_config_test",
      src = "invalid_config.jsonnet",
      deps = [":base_config"],
      golden = "invalid_config.output",
      error = 1,
  )
  ```

  To run the test: `bazel test //config:invalid_config_test`
""",
)
943 |
--------------------------------------------------------------------------------