├── .gitignore ├── migrations ├── .gitkeep ├── 2021-02-15-093619_rm-job-input-artefacts │ ├── up.sql │ └── down.sql ├── 2021-02-15-094104_rm-job-output-artefacts │ ├── up.sql │ └── down.sql ├── 2021-02-05-142606_remove-package-tree-json-from-submit │ ├── up.sql │ └── down.sql ├── 2021-02-22-092534_add-release-stores │ ├── down.sql │ └── up.sql ├── 2021-01-07-122618_dedicated-releases-table │ ├── down.sql │ └── up.sql ├── 00000000000000_diesel_initial_setup │ ├── down.sql │ └── up.sql ├── 2020-10-29-143003_create_submit_envs │ ├── down.sql │ └── up.sql ├── 2020-10-29-142443_create_jobs │ ├── down.sql │ └── up.sql ├── 2020-10-29-132821_create_envvars │ ├── down.sql │ └── up.sql ├── 2020-10-29-140838_create_packages │ ├── down.sql │ └── up.sql ├── 2020-10-29-140956_create_images │ ├── down.sql │ └── up.sql ├── 2020-10-29-141023_create_submits │ ├── down.sql │ └── up.sql ├── 2020-10-29-142914_create_job_envs │ ├── down.sql │ └── up.sql ├── 2020-10-29-140235_create_githashes │ ├── down.sql │ └── up.sql ├── 2020-10-29-141904_create_endpoints │ ├── down.sql │ └── up.sql ├── 2020-10-29-142339_create_artifacts │ ├── down.sql │ └── up.sql ├── 2020-11-11-111013_job_output_artifacts │ ├── down.sql │ └── up.sql ├── 2020-10-29-142619_create_job_input_artifacts │ ├── down.sql │ └── up.sql ├── 2020-11-05-125716_drop_submit_buildplan │ ├── up.sql │ └── down.sql ├── 2020-11-11-111326_job_has_multiple_outputs │ ├── up.sql │ └── down.sql ├── 2020-11-13-161449_add_job_uuid │ ├── up.sql │ └── down.sql ├── 2020-12-03-084509_artifact_release_flag │ ├── up.sql │ └── down.sql ├── 2020-12-03-085333_artifact_belongs_to_job │ ├── down.sql │ └── up.sql ├── 2020-12-14-100454_artifact-unique-by-job │ ├── down.sql │ └── up.sql └── 2020-12-14-113756_artifact_not_unique_by_path │ ├── up.sql │ └── down.sql ├── examples └── packages │ ├── repo │ ├── s │ │ ├── foo.patch │ │ ├── 19.0 │ │ │ ├── foo.patch │ │ │ ├── s190.patch │ │ │ └── pkg.toml │ │ ├── 19.3 │ │ │ ├── s193.patch │ │ │ └── pkg.toml │ │ ├── 
19.1 │ │ │ └── pkg.toml │ │ ├── 19.2 │ │ │ └── pkg.toml │ │ └── pkg.toml │ ├── h │ │ └── pkg.toml │ ├── m │ │ └── pkg.toml │ ├── n │ │ └── pkg.toml │ ├── r │ │ └── pkg.toml │ ├── t │ │ └── pkg.toml │ ├── u │ │ └── pkg.toml │ ├── z │ │ └── pkg.toml │ ├── d │ │ └── pkg.toml │ ├── e │ │ └── pkg.toml │ ├── k │ │ └── pkg.toml │ ├── l │ │ └── pkg.toml │ ├── o │ │ └── pkg.toml │ ├── p │ │ └── pkg.toml │ ├── q │ │ └── pkg.toml │ ├── v │ │ └── pkg.toml │ ├── w │ │ └── pkg.toml │ ├── x │ │ └── pkg.toml │ ├── y │ │ └── pkg.toml │ ├── a │ │ └── pkg.toml │ ├── f │ │ └── pkg.toml │ ├── i │ │ └── pkg.toml │ ├── b │ │ └── pkg.toml │ ├── c │ │ └── pkg.toml │ ├── g │ │ └── pkg.toml │ ├── j │ │ └── pkg.toml │ ├── pkg.toml │ └── config.toml │ ├── sources │ ├── a-1 │ │ └── src.source │ ├── b-2 │ │ └── src.source │ ├── c-3 │ │ └── src.source │ ├── d-4 │ │ └── src.source │ ├── e-5 │ │ └── src.source │ ├── f-6 │ │ └── src.source │ ├── g-7 │ │ └── src.source │ ├── h-8 │ │ └── src.source │ ├── i-9 │ │ └── src.source │ ├── j-10 │ │ └── src.source │ ├── k-11 │ │ └── src.source │ ├── l-12 │ │ └── src.source │ ├── m-13 │ │ └── src.source │ ├── n-14 │ │ └── src.source │ ├── o-15 │ │ └── src.source │ ├── p-16 │ │ └── src.source │ ├── q-17 │ │ └── src.source │ ├── r-18 │ │ └── src.source │ ├── t-20 │ │ └── src.source │ ├── u-21 │ │ └── src.source │ ├── v-22 │ │ └── src.source │ ├── w-23 │ │ └── src.source │ ├── x-24 │ │ └── src.source │ ├── y-25 │ │ └── src.source │ ├── z-26 │ │ └── src.source │ ├── s-19.0 │ │ └── src.source │ ├── s-19.1 │ │ └── src.source │ ├── s-19.2 │ │ └── src.source │ └── s-19.3 │ │ └── src.source │ ├── Makefile │ └── README.md ├── rustfmt.toml ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── workflows │ ├── commit-lint.yml │ └── cargo.yml ├── ISSUE_TEMPLATE.md └── dependabot.yml ├── diesel.toml ├── CODE_OF_CONDUCT.md ├── doc ├── README.md ├── containers.md └── scripting.md ├── CHANGELOG.toml ├── src ├── package │ ├── util.rs │ ├── mod.rs │ ├── phase.rs │ ├── name.rs │ ├── 
dependency │ │ └── mod.rs │ └── source.rs ├── log │ ├── mod.rs │ ├── util.rs │ └── item.rs ├── orchestrator │ ├── mod.rs │ └── util.rs ├── repository │ ├── fs │ │ ├── mod.rs │ │ ├── element.rs │ │ └── path.rs │ ├── mod.rs │ └── pkg_toml_source.rs ├── db │ ├── mod.rs │ ├── models │ │ ├── mod.rs │ │ ├── job_env.rs │ │ ├── release_store.rs │ │ ├── envvar.rs │ │ ├── githash.rs │ │ ├── releases.rs │ │ ├── image.rs │ │ ├── endpoint.rs │ │ ├── package.rs │ │ ├── submit.rs │ │ ├── artifact.rs │ │ └── job.rs │ └── connection.rs ├── filestore │ ├── mod.rs │ ├── release.rs │ ├── util.rs │ └── staging.rs ├── endpoint │ ├── mod.rs │ ├── util.rs │ └── configuration.rs ├── job │ ├── mod.rs │ ├── resource.rs │ ├── job.rs │ ├── dag.rs │ └── runnable.rs ├── config │ ├── configuration.rs │ ├── mod.rs │ ├── container_config.rs │ ├── docker_config.rs │ ├── endpoint_config.rs │ └── util.rs ├── util │ ├── git.rs │ ├── env.rs │ ├── progress.rs │ ├── mod.rs │ └── parser.rs ├── consts.rs ├── commands │ ├── mod.rs │ ├── versions_of.rs │ ├── lint.rs │ ├── env_of.rs │ ├── what_depends.rs │ ├── dependencies_of.rs │ ├── find_pkg.rs │ ├── tree_of.rs │ └── metrics.rs ├── ui │ └── mod.rs ├── schema.rs └── source │ └── mod.rs ├── shell.nix ├── scripts └── dev-pg-container.sh ├── .gitlint ├── deny.toml ├── CONTRIBUTING.md ├── Cargo.toml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /migrations/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/packages/repo/s/foo.patch: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.0/foo.patch: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.0/s190.patch: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.3/s193.patch: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/packages/sources/a-1/src.source: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /examples/packages/sources/b-2/src.source: -------------------------------------------------------------------------------- 1 | 2 2 | -------------------------------------------------------------------------------- /examples/packages/sources/c-3/src.source: -------------------------------------------------------------------------------- 1 | 3 2 | -------------------------------------------------------------------------------- /examples/packages/sources/d-4/src.source: -------------------------------------------------------------------------------- 1 | 4 2 | -------------------------------------------------------------------------------- /examples/packages/sources/e-5/src.source: -------------------------------------------------------------------------------- 1 | 5 2 | -------------------------------------------------------------------------------- /examples/packages/sources/f-6/src.source: -------------------------------------------------------------------------------- 1 | 6 2 | -------------------------------------------------------------------------------- /examples/packages/sources/g-7/src.source: -------------------------------------------------------------------------------- 1 
| 7 2 | -------------------------------------------------------------------------------- /examples/packages/sources/h-8/src.source: -------------------------------------------------------------------------------- 1 | 8 2 | -------------------------------------------------------------------------------- /examples/packages/sources/i-9/src.source: -------------------------------------------------------------------------------- 1 | 9 2 | -------------------------------------------------------------------------------- /examples/packages/sources/j-10/src.source: -------------------------------------------------------------------------------- 1 | 10 2 | -------------------------------------------------------------------------------- /examples/packages/sources/k-11/src.source: -------------------------------------------------------------------------------- 1 | 11 2 | -------------------------------------------------------------------------------- /examples/packages/sources/l-12/src.source: -------------------------------------------------------------------------------- 1 | 12 2 | -------------------------------------------------------------------------------- /examples/packages/sources/m-13/src.source: -------------------------------------------------------------------------------- 1 | 13 2 | -------------------------------------------------------------------------------- /examples/packages/sources/n-14/src.source: -------------------------------------------------------------------------------- 1 | 14 2 | -------------------------------------------------------------------------------- /examples/packages/sources/o-15/src.source: -------------------------------------------------------------------------------- 1 | 15 2 | -------------------------------------------------------------------------------- /examples/packages/sources/p-16/src.source: -------------------------------------------------------------------------------- 1 | 16 2 | 
-------------------------------------------------------------------------------- /examples/packages/sources/q-17/src.source: -------------------------------------------------------------------------------- 1 | 17 2 | -------------------------------------------------------------------------------- /examples/packages/sources/r-18/src.source: -------------------------------------------------------------------------------- 1 | 18 2 | -------------------------------------------------------------------------------- /examples/packages/sources/t-20/src.source: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /examples/packages/sources/u-21/src.source: -------------------------------------------------------------------------------- 1 | 21 2 | -------------------------------------------------------------------------------- /examples/packages/sources/v-22/src.source: -------------------------------------------------------------------------------- 1 | 22 2 | -------------------------------------------------------------------------------- /examples/packages/sources/w-23/src.source: -------------------------------------------------------------------------------- 1 | 23 2 | -------------------------------------------------------------------------------- /examples/packages/sources/x-24/src.source: -------------------------------------------------------------------------------- 1 | 24 2 | -------------------------------------------------------------------------------- /examples/packages/sources/y-25/src.source: -------------------------------------------------------------------------------- 1 | 25 2 | -------------------------------------------------------------------------------- /examples/packages/sources/z-26/src.source: -------------------------------------------------------------------------------- 1 | 26 2 | 
-------------------------------------------------------------------------------- /examples/packages/sources/s-19.0/src.source: -------------------------------------------------------------------------------- 1 | 19 2 | -------------------------------------------------------------------------------- /examples/packages/sources/s-19.1/src.source: -------------------------------------------------------------------------------- 1 | 19 2 | -------------------------------------------------------------------------------- /examples/packages/sources/s-19.2/src.source: -------------------------------------------------------------------------------- 1 | 19 2 | -------------------------------------------------------------------------------- /examples/packages/sources/s-19.3/src.source: -------------------------------------------------------------------------------- 1 | 19 2 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.1/pkg.toml: -------------------------------------------------------------------------------- 1 | version = "19.1" 2 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2024" 2 | style_edition = "2021" 3 | newline_style = "Unix" 4 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.2/pkg.toml: -------------------------------------------------------------------------------- 1 | version = "19.2" 2 | patches = [ 3 | "../foo.patch", 4 | ] 5 | -------------------------------------------------------------------------------- /examples/packages/repo/s/19.3/pkg.toml: -------------------------------------------------------------------------------- 1 | version = "19.3" 2 | patches = [ 3 | "s193.patch", 4 | ] 5 | -------------------------------------------------------------------------------- 
/.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /migrations/2021-02-15-093619_rm-job-input-artefacts/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here DROP TABLE job_input_artifacts -------------------------------------------------------------------------------- /migrations/2021-02-15-094104_rm-job-output-artefacts/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here DROP TABLE job_output_artifacts -------------------------------------------------------------------------------- /examples/packages/repo/s/19.0/pkg.toml: -------------------------------------------------------------------------------- 1 | version = "19.0" 2 | patches = [ 3 | "./foo.patch", 4 | "s190.patch", 5 | ] 6 | -------------------------------------------------------------------------------- /migrations/2021-02-05-142606_remove-package-tree-json-from-submit/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | ALTER TABLE 3 | submits 4 | DROP COLUMN 5 | tree 6 | -------------------------------------------------------------------------------- /diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "src/schema.rs" 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | We adhere to the same 4 | [Code of Conduct as all Rust projects](https://www.rust-lang.org/policies/code-of-conduct). 
5 | 6 | -------------------------------------------------------------------------------- /examples/packages/repo/h/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "h" 2 | version = "8" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "136571b41aa14adc10c5f3c987d43c02c8f5d498" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/m/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "m" 2 | version = "13" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "feee44ad365b6b1ec75c5621a0ad067371102854" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/n/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "n" 2 | version = "14" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "030514d80869744a4e2f60d2fd37d6081f5ed01a" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/r/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "r" 2 | version = "18" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "24b9c1f3fddff79893e5304f998f2f95ebebd149" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/t/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "t" 2 | version = "20" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "d0758565fd06c37aa66b071160d156f5628cd518" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/u/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "u" 2 | version = 
"21" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "8eecbb71d418ef8c7d583dd506a994b1bc1c3f7b" 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/z/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "z" 2 | version = "26" 3 | 4 | [sources.src] 5 | url = "https://example.com" 6 | hash.hash = "a0361d509d714f50e954ffeb49ac18222609cf2a" 7 | 8 | -------------------------------------------------------------------------------- /migrations/2021-02-22-092534_add-release-stores/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` DROP TABLE release_stores; ALTER TABLE releases DROP COLUMN release_store_id; -------------------------------------------------------------------------------- /migrations/2021-02-05-142606_remove-package-tree-json-from-submit/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` 2 | ALTER TABLE 3 | submits 4 | ADD COLUMN 5 | tree JSONB NOT NULL 6 | -------------------------------------------------------------------------------- /examples/packages/repo/s/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "s" 2 | 3 | patches = [ "./foo.patch" ] 4 | 5 | [sources.src] 6 | url = "https://example.com" 7 | hash.hash = "ba9f376fa71904ccde2a756a24a4e47ec014ee0a" 8 | 9 | -------------------------------------------------------------------------------- /migrations/2021-01-07-122618_dedicated-releases-table/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` 2 | ALTER TABLE 3 | artifacts 4 | ADD COLUMN 5 | released boolean NOT NULL; 6 | 7 | DROP TABLE releases; 8 | 
-------------------------------------------------------------------------------- /examples/packages/repo/d/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "d" 2 | version = "4" 3 | 4 | [dependencies] 5 | runtime = ["j =10"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "9c6b057a2b9d96a4067a749ee3b3b0158d390cf1" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/e/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "e" 2 | version = "5" 3 | 4 | [dependencies] 5 | runtime = ["k =11"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "5d9474c0309b7ca09a182d888f73b37a8fe1362c" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/k/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "k" 2 | version = "11" 3 | 4 | [dependencies] 5 | runtime = ["u =21"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "dd71038f3463f511ee7403dbcbc87195302d891c" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/l/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "l" 2 | version = "12" 3 | 4 | [dependencies] 5 | runtime = ["u =21"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "ad552e6dc057d1d825bf49df79d6b98eba846ebe" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/o/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "o" 2 | version = "15" 3 | 4 | [dependencies] 5 | runtime = ["x =24"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "587b596f04f7db9c2cad3d6b87dd2b3a05de4f35" 
10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/p/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "p" 2 | version = "16" 3 | 4 | [dependencies] 5 | runtime = ["x =24"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "3596ea087bfdaf52380eae441077572ed289d657" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/q/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "q" 2 | version = "17" 3 | 4 | [dependencies] 5 | runtime = ["v =22"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "ad48103e4fc71796e9708cafc43adeed0d1076b7" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/v/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "v" 2 | version = "22" 3 | 4 | [dependencies] 5 | runtime = ["w =23"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "a66ca4290ebaf525721fc670ea53476a15957f9e" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/w/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "w" 2 | version = "23" 3 | 4 | [dependencies] 5 | runtime = ["x =24"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "aec46dc0de48f39f98f9572b6560ca3f0916b715" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/x/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "x" 2 | version = "24" 3 | 4 | [dependencies] 5 | runtime = ["y =25"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = 
"b31990eea1cee9f421c933461a2f3c3dd741a58b" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/y/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "y" 2 | version = "25" 3 | 4 | [dependencies] 5 | runtime = ["z =26"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "c6e4ffdb7e1f4c736fb7ab897162332b4619d9ca" 10 | 11 | -------------------------------------------------------------------------------- /doc/README.md: -------------------------------------------------------------------------------- 1 | # Documentation for butido 2 | 3 | This documentation is a general overview over the mechanisms and 4 | functionalities of butido. 5 | 6 | Like every documentation for every tool out there, it is neither complete nor up to date 7 | 8 | -------------------------------------------------------------------------------- /examples/packages/repo/a/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "a" 2 | version = "1" 3 | 4 | [dependencies] 5 | runtime = ["b =2", "c =3"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/f/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "f" 2 | version = "6" 3 | 4 | [dependencies] 5 | runtime = ["l =12", "m =13"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "ccf271b7830882da1791852baeca1737fcbe4b90" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/i/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "i" 2 | version = "9" 3 | 4 | [dependencies] 5 | runtime = ["q =17", "r =18"] 6 | 7 | [sources.src] 
8 | url = "https://example.com" 9 | hash.hash = "b6abd567fa79cbe0196d093a067271361dc6ca8b" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/b/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "b" 2 | version = "2" 3 | 4 | [dependencies] 5 | runtime = ["d =4", "e =5", "f =6"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "7448d8798a4380162d4b56f9b452e2f6f9e24e7a" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/c/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "c" 2 | version = "3" 3 | 4 | [dependencies] 5 | runtime = ["g =7", "h =8", "i =9"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "a3db5c13ff90a36963278c6a39e4ee3c22e2a436" 10 | 11 | -------------------------------------------------------------------------------- /examples/packages/repo/g/pkg.toml: -------------------------------------------------------------------------------- 1 | name = "g" 2 | version = "7" 3 | 4 | [dependencies] 5 | runtime = ["m =13", "n =14", "o =15", "p =16"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "d3964f9dad9f60363c81b688324d95b4ec7c8038" 10 | 11 | -------------------------------------------------------------------------------- /CHANGELOG.toml: -------------------------------------------------------------------------------- 1 | # Each key-value mapping is a changelog entry (configuration version -> required changes): 2 | 0 = "This version was never used" 3 | 1 = """The format of the `compatibility` setting has changed from a string \ 4 | (`semver::VersionReq`) to a number (`u16`).""" 5 | -------------------------------------------------------------------------------- /examples/packages/repo/j/pkg.toml: 
-------------------------------------------------------------------------------- 1 | name = "j" 2 | version = "10" 3 | 4 | [dependencies] 5 | runtime = ["s =19.0", "s =19.1", "s =19.2", "s =19.3", "t =20"] 6 | 7 | [sources.src] 8 | url = "https://example.com" 9 | hash.hash = "4143d3a341877154d6e95211464e1df1015b74bd" 10 | 11 | -------------------------------------------------------------------------------- /migrations/2021-02-22-092534_add-release-stores/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here CREATE TABLE release_stores ( id SERIAL PRIMARY KEY NOT NULL, store_name VARCHAR(255) NOT NULL UNIQUE ); ALTER TABLE releases ADD COLUMN release_store_id INTEGER REFERENCES release_stores(id) NOT NULL; -------------------------------------------------------------------------------- /migrations/2021-02-15-093619_rm-job-input-artefacts/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` CREATE TABLE job_input_artifacts ( id SERIAL PRIMARY KEY NOT NULL, job_id INTEGER REFERENCES jobs(id) NOT NULL, artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, CONSTRAINT UC_jobid_artifactid UNIQUE (job_id, artifact_id) ) -------------------------------------------------------------------------------- /migrations/2021-02-15-094104_rm-job-output-artefacts/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` CREATE TABLE job_output_artifacts ( id SERIAL PRIMARY KEY NOT NULL, job_id INTEGER REFERENCES jobs(id) NOT NULL, artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, CONSTRAINT UC_jobid_output_artifactid UNIQUE (job_id, artifact_id) ) -------------------------------------------------------------------------------- /src/package/util.rs: -------------------------------------------------------------------------------- 1 | // 2 | // 
Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | 12 | -------------------------------------------------------------------------------- /migrations/00000000000000_diesel_initial_setup/down.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 4 | 5 | DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass); 6 | DROP FUNCTION IF EXISTS diesel_set_updated_at(); 7 | -------------------------------------------------------------------------------- /migrations/2021-01-07-122618_dedicated-releases-table/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | ALTER TABLE 3 | artifacts 4 | DROP COLUMN 5 | released; 6 | 7 | CREATE TABLE releases ( 8 | id SERIAL PRIMARY KEY NOT NULL, 9 | artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, 10 | release_date TIMESTAMP WITH TIME ZONE NOT NULL, 11 | 12 | CONSTRAINT UC_art_release_unique UNIQUE (artifact_id, release_date) 13 | ); 14 | -------------------------------------------------------------------------------- /migrations/2020-10-29-143003_create_submit_envs/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at 
https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | -------------------------------------------------------------------------------- /src/log/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod parser; 12 | pub use parser::*; 13 | 14 | mod item; 15 | pub use item::*; 16 | 17 | mod util; 18 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142443_create_jobs/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE jobs 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-132821_create_envvars/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file 
should undo anything in `up.sql` 12 | DROP TABLE envvars 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-140838_create_packages/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE packages 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-140956_create_images/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE images 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-141023_create_submits/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE submits 13 | 
-------------------------------------------------------------------------------- /migrations/2020-10-29-142914_create_job_envs/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE job_envs 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-140235_create_githashes/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE githashes 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-141904_create_endpoints/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE endpoints 13 | 
-------------------------------------------------------------------------------- /migrations/2020-10-29-142339_create_artifacts/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE artifacts 13 | -------------------------------------------------------------------------------- /src/orchestrator/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | #![allow(clippy::module_inception)] 12 | mod orchestrator; 13 | pub use orchestrator::*; 14 | 15 | mod util; 16 | -------------------------------------------------------------------------------- /src/repository/fs/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod representation; 12 | pub use representation::FileSystemRepresentation; 13 | 14 | mod element; 15 | mod path; 16 | 
-------------------------------------------------------------------------------- /migrations/2020-11-11-111013_job_output_artifacts/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE job_output_artifacts 13 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142619_create_job_input_artifacts/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | DROP TABLE job_input_artifacts 13 | -------------------------------------------------------------------------------- /migrations/2020-11-05-125716_drop_submit_buildplan/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | submits 14 | DROP COLUMN 15 | buildplan 16 | 
-------------------------------------------------------------------------------- /migrations/2020-11-11-111326_job_has_multiple_outputs/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | jobs 14 | DROP COLUMN 15 | artifact_id 16 | -------------------------------------------------------------------------------- /migrations/2020-11-13-161449_add_job_uuid/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | jobs 14 | ADD COLUMN 15 | uuid UUID NOT NULL UNIQUE 16 | -------------------------------------------------------------------------------- /src/repository/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | #![allow(clippy::module_inception)] 12 | mod repository; 13 | pub use repository::*; 14 | 15 | mod fs; 16 | mod pkg_toml_source; 17 | 
-------------------------------------------------------------------------------- /migrations/2020-11-13-161449_add_job_uuid/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | jobs 14 | DROP COLUMN 15 | uuid 16 | -------------------------------------------------------------------------------- /src/db/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod connection; 12 | pub use connection::*; 13 | 14 | mod find_artifacts; 15 | pub use find_artifacts::FindArtifacts; 16 | 17 | pub mod models; 18 | -------------------------------------------------------------------------------- /migrations/2020-12-03-084509_artifact_release_flag/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | artifacts 14 | ADD COLUMN 15 | released 
boolean NOT NULL 16 | -------------------------------------------------------------------------------- /.github/workflows/commit-lint.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | 4 | name: Pull Request Checks 5 | 6 | jobs: 7 | commit-lint: 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - uses: actions/checkout@v6 12 | with: 13 | fetch-depth: 0 14 | - uses: actions/setup-python@v6 15 | with: 16 | python-version: '3.x' 17 | - run: pip install gitlint 18 | - run: | 19 | gitlint \ 20 | --commits "$(git merge-base origin/master HEAD)..HEAD" 21 | -------------------------------------------------------------------------------- /migrations/2020-12-03-084509_artifact_release_flag/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | artifacts 14 | DROP COLUMN 15 | released 16 | -------------------------------------------------------------------------------- /migrations/2020-12-03-085333_artifact_belongs_to_job/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | artifacts 14 | DROP COLUMN 15 | job_id 16 | 
-------------------------------------------------------------------------------- /migrations/2020-10-29-140956_create_images/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE images ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | name VARCHAR NOT NULL UNIQUE 15 | ) 16 | -------------------------------------------------------------------------------- /migrations/2020-12-03-085333_artifact_belongs_to_job/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | artifacts 14 | ADD COLUMN 15 | job_id INTEGER REFERENCES jobs(id) NOT NULL 16 | -------------------------------------------------------------------------------- /src/filestore/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod release; 12 | pub use release::*; 13 | 14 | mod staging; 15 | pub use 
staging::*; 16 | 17 | pub mod path; 18 | pub use path::ArtifactPath; 19 | 20 | mod util; 21 | -------------------------------------------------------------------------------- /migrations/2020-10-29-140235_create_githashes/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE githashes ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | hash VARCHAR(64) NOT NULL UNIQUE 15 | ) 16 | -------------------------------------------------------------------------------- /migrations/2020-10-29-141904_create_endpoints/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE endpoints ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | name VARCHAR NOT NULL UNIQUE 15 | ) 16 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142339_create_artifacts/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 
7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE artifacts ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | path VARCHAR NOT NULL UNIQUE 15 | ) 16 | -------------------------------------------------------------------------------- /migrations/2020-11-05-125716_drop_submit_buildplan/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | submits 14 | ADD COLUMN 15 | buildplan JSONB NOT NULL; 16 | -------------------------------------------------------------------------------- /migrations/2020-12-14-100454_artifact-unique-by-job/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | artifacts 14 | DROP CONSTRAINT 15 | path_job_id_unique 16 | -------------------------------------------------------------------------------- /migrations/2020-12-14-100454_artifact-unique-by-job/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the 
terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | artifacts 14 | ADD CONSTRAINT 15 | path_job_id_unique 16 | UNIQUE (path, job_id) 17 | -------------------------------------------------------------------------------- /src/endpoint/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod configuration; 12 | pub use configuration::*; 13 | 14 | mod scheduler; 15 | pub use scheduler::*; 16 | 17 | mod configured; 18 | pub use configured::*; 19 | 20 | pub mod util; 21 | -------------------------------------------------------------------------------- /migrations/2020-12-14-113756_artifact_not_unique_by_path/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | ALTER TABLE 13 | artifacts 14 | DROP CONSTRAINT 15 | artifacts_path_key -- as generated by default for postgresql 16 | -------------------------------------------------------------------------------- /migrations/2020-11-11-111326_job_has_multiple_outputs/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 
2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | jobs 14 | ADD COLUMN 15 | artifact_id INTEGER REFERENCES artifacts(id) NOT NULL 16 | -------------------------------------------------------------------------------- /src/job/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | #[allow(clippy::module_inception)] 12 | mod job; 13 | pub use job::*; 14 | 15 | mod dag; 16 | pub use dag::*; 17 | 18 | mod resource; 19 | pub use resource::*; 20 | 21 | mod runnable; 22 | pub use runnable::*; 23 | -------------------------------------------------------------------------------- /migrations/2020-12-14-113756_artifact_not_unique_by_path/down.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- This file should undo anything in `up.sql` 12 | ALTER TABLE 13 | artifacts 14 | ADD CONSTRAINT 15 | artifacts_path_key -- as generated by default for postgresql 16 | UNIQUE (path) 17 | 18 | 
-------------------------------------------------------------------------------- /migrations/2020-10-29-132821_create_envvars/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE envvars ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | name VARCHAR NOT NULL, 15 | value VARCHAR NOT NULL, 16 | 17 | CONSTRAINT UC_name_value UNIQUE (name, value) 18 | ) 19 | -------------------------------------------------------------------------------- /migrations/2020-10-29-140838_create_packages/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE packages ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | name VARCHAR NOT NULL, 15 | version VARCHAR NOT NULL, 16 | 17 | CONSTRAINT UC_name_version UNIQUE (name, version) 18 | ) 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## What 4 | 5 | * [ ] Bug 6 | * [ ] Feature Request 7 | * [ ] Question 8 | * [ ] Other 9 | 10 | 11 | ## Version 12 | 13 | **Butido Version:** 14 | **Rust Version:** 15 | **Postgres Version:** 16 | **Docker Version:** 17 | **OS I'm running 
on:** 18 | 19 | 20 | For the lazy: 21 | 22 | ```bash 23 | butido --version 24 | rustc --version 25 | docker --version 26 | uname -a 27 | cat /etc/os-release 28 | ``` 29 | 30 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142914_create_job_envs/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE job_envs ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | job_id INTEGER REFERENCES jobs(id) NOT NULL, 15 | env_id INTEGER REFERENCES envvars(id) NOT NULL, 16 | 17 | CONSTRAINT UC_jobid_envid UNIQUE (job_id, env_id) 18 | ) 19 | -------------------------------------------------------------------------------- /migrations/2020-10-29-143003_create_submit_envs/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE submit_envs ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | submit_id INTEGER REFERENCES submits(id) NOT NULL, 15 | env_id INTEGER REFERENCES envvars(id) NOT NULL, 16 | 17 | CONSTRAINT UC_submitid_envid UNIQUE (submit_id, env_id) 18 | ) 19 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142619_create_job_input_artifacts/up.sql: 
-------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE job_input_artifacts ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | job_id INTEGER REFERENCES jobs(id) NOT NULL, 15 | artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, 16 | 17 | CONSTRAINT UC_jobid_artifactid UNIQUE (job_id, artifact_id) 18 | ) 19 | -------------------------------------------------------------------------------- /migrations/2020-11-11-111013_job_output_artifacts/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE job_output_artifacts ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | job_id INTEGER REFERENCES jobs(id) NOT NULL, 15 | artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, 16 | 17 | CONSTRAINT UC_jobid_output_artifactid UNIQUE (job_id, artifact_id) 18 | ) 19 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { example ? "1", ... 
}: 2 | 3 | let 4 | moz_overlay = import ( 5 | builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz 6 | ); 7 | 8 | pkgs = import { overlays = [ moz_overlay ]; }; 9 | 10 | in 11 | pkgs.mkShell { 12 | buildInputs = with pkgs; [ 13 | rustChannels.stable.rust-std 14 | rustChannels.stable.rust 15 | rustChannels.stable.rustc 16 | rustChannels.stable.cargo 17 | 18 | diesel-cli 19 | pgcli 20 | postgresql 21 | 22 | cmake 23 | curl 24 | gcc 25 | openssl 26 | pkgconfig 27 | which 28 | zlib 29 | ]; 30 | 31 | LIBCLANG_PATH = "${pkgs.llvmPackages.libclang}/lib"; 32 | } 33 | 34 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: "/" 5 | schedule: 6 | interval: monthly 7 | open-pull-requests-limit: 30 8 | rebase-strategy: "disabled" 9 | ignore: 10 | # Ignore all patch updates for version updates only (we'll pull in SemVer 11 | # compatible updates in batches using `cargo update` while dependabot's 12 | # task is to perform "major" updates that require changes to `Cargo.toml` 13 | # as well as security updates): 14 | - dependency-name: "*" 15 | update-types: ["version-update:semver-patch"] 16 | - package-ecosystem: github-actions 17 | directory: "/" 18 | schedule: 19 | interval: monthly 20 | rebase-strategy: "disabled" 21 | -------------------------------------------------------------------------------- /scripts/dev-pg-container.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | die() { 4 | echo >&2 "$*" 5 | exit 1 6 | } 7 | 8 | [ -z "$PG_USER" ] && die "Not set: PG_USER" 9 | [ -z "$PG_PW" ] && die "Not set: PG_PW" 10 | [ -z "$PG_DB" ] && die "Not set: PG_DB" 11 | [ -z "$PG_CONTAINER_NAME" ] && die "Not set: PG_CONTAINER_NAME" 12 | 13 | docker run \ 14 | --name 
${PG_CONTAINER_NAME} \ 15 | -e POSTGRES_PASSWORD=${PG_PW} \ 16 | -p 5432:5432 \ 17 | -m 512m \ 18 | -d \ 19 | --rm \ 20 | postgres 21 | 22 | sleep 2 23 | docker exec -it ${PG_CONTAINER_NAME} psql -U postgres -c "CREATE USER ${PG_USER} PASSWORD '${PG_PW}' SUPERUSER CREATEDB INHERIT LOGIN" 24 | sleep 2 25 | docker exec -it ${PG_CONTAINER_NAME} createdb -U postgres butido 26 | 27 | echo "DONE" 28 | -------------------------------------------------------------------------------- /src/config/configuration.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::ops::Deref; 12 | 13 | use crate::config::NotValidatedConfiguration; 14 | 15 | /// A valid configuration (validated via NotValidatedConfiguration::validate()) 16 | #[derive(Debug)] 17 | pub struct Configuration { 18 | pub(in crate::config) inner: NotValidatedConfiguration, 19 | } 20 | 21 | impl Deref for Configuration { 22 | type Target = NotValidatedConfiguration; 23 | 24 | fn deref(&self) -> &Self::Target { 25 | &self.inner 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/package/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Module that contains all types and functionality related to packages.
SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod artifact; 12 | pub use artifact::*; 13 | 14 | mod endpoint; 15 | pub use endpoint::*; 16 | 17 | mod envvar; 18 | pub use envvar::*; 19 | 20 | mod image; 21 | pub use image::*; 22 | 23 | mod job; 24 | pub use job::*; 25 | 26 | mod job_env; 27 | pub use job_env::*; 28 | 29 | mod githash; 30 | pub use githash::*; 31 | 32 | mod package; 33 | pub use package::*; 34 | 35 | mod releases; 36 | pub use releases::*; 37 | 38 | mod release_store; 39 | pub use release_store::*; 40 | 41 | mod submit; 42 | pub use submit::*; 43 | -------------------------------------------------------------------------------- /migrations/2020-10-29-142443_create_jobs/up.sql: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2020-2022 science+computing ag and other contributors 3 | -- 4 | -- This program and the accompanying materials are made 5 | -- available under the terms of the Eclipse Public License 2.0 6 | -- which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | -- 8 | -- SPDX-License-Identifier: EPL-2.0 9 | -- 10 | 11 | -- Your SQL goes here 12 | CREATE TABLE jobs ( 13 | id SERIAL PRIMARY KEY NOT NULL, 14 | 15 | submit_id INTEGER REFERENCES submits(id) NOT NULL, 16 | endpoint_id INTEGER REFERENCES endpoints(id) NOT NULL, 17 | package_id INTEGER REFERENCES packages(id) NOT NULL, 18 | image_id INTEGER REFERENCES images(id) NOT NULL, 19 | artifact_id INTEGER REFERENCES artifacts(id) NOT NULL, 20 | 21 | container_hash VARCHAR NOT NULL, 22 | script_text TEXT NOT NULL, 23 | log_text TEXT NOT NULL 24 | ) 25 | -------------------------------------------------------------------------------- /src/endpoint/util.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of 
the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::sync::Arc; 12 | 13 | use anyhow::Result; 14 | use futures::FutureExt; 15 | use tokio_stream::StreamExt; 16 | 17 | use crate::endpoint::Endpoint; 18 | use crate::endpoint::EndpointConfiguration; 19 | 20 | pub async fn setup_endpoints(endpoints: Vec) -> Result>> { 21 | let unordered = futures::stream::FuturesUnordered::new(); 22 | 23 | for cfg in endpoints.into_iter() { 24 | unordered.push(Endpoint::setup(cfg).map(|r_ep| r_ep.map(Arc::new))); 25 | } 26 | 27 | unordered.collect().await 28 | } 29 | -------------------------------------------------------------------------------- /src/util/git.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::anyhow; 12 | use anyhow::Context; 13 | use anyhow::Result; 14 | use git2::Repository; 15 | use tracing::trace; 16 | 17 | pub fn get_repo_head_commit_hash(r: &Repository) -> Result { 18 | let s = r 19 | .head() 20 | .with_context(|| anyhow!("Getting HEAD from repository at {}", r.path().display()))? 21 | .peel_to_commit() 22 | .with_context(|| anyhow!("Failed to get commit hash: Not valid UTF8"))? 
23 | .id() 24 | .to_string(); 25 | 26 | trace!("Found git commit hash = {}", s); 27 | Ok(s) 28 | } 29 | -------------------------------------------------------------------------------- /src/consts.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | /// The path to the directory inside the container where the inputs for the script run are copied 12 | /// to. 13 | pub const INPUTS_DIR_PATH: &str = "/inputs"; 14 | 15 | /// The path to the directory inside the container where the outputs of a compile job must be 16 | /// located after the script was run 17 | pub const OUTPUTS_DIR_PATH: &str = "/outputs"; 18 | pub const OUTPUTS_DIR_NAME: &str = "outputs"; 19 | 20 | pub const PATCH_DIR_PATH: &str = "/patches"; 21 | 22 | /// The path where the script that is executed inside the container is copied to. 
23 | pub const SCRIPT_PATH: &str = "/script"; 24 | -------------------------------------------------------------------------------- /src/util/env.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::anyhow; 12 | use anyhow::Result; 13 | 14 | use crate::util::EnvironmentVariableName; 15 | 16 | pub fn parse_to_env(s: &str) -> Result<(EnvironmentVariableName, String)> { 17 | let v = s.split('=').collect::>(); 18 | Ok(( 19 | EnvironmentVariableName::from( 20 | *v.first() 21 | .ok_or_else(|| anyhow!("Environment variable has no key: {s}"))?, 22 | ), 23 | String::from( 24 | *v.get(1) 25 | .ok_or_else(|| anyhow!("Environment variable has no key: {s}"))?, 26 | ), 27 | )) 28 | } 29 | -------------------------------------------------------------------------------- /doc/containers.md: -------------------------------------------------------------------------------- 1 | ## Containers 2 | 3 | The containers you use to run your builds are handled the following way: 4 | 5 | 1. Dependencies and sources are copied to the container at `/inputs`, 6 | the compiled packaging script is copied to the container at `/script` 7 | 2. The script is started 8 | 3. The result artifacts are copied from `/outputs` to the staging store 9 | 10 | 11 | ### Conventions 12 | 13 | There are some conventions regarding packages, dependencies, sources and so 14 | on. Those are listed here. 15 | 16 | 1. Dependencies are named `/inputs/-.pkg` inside the container 17 | 2. Sources are named `/inputs/src.source` 18 | 3. 
Outputs are expected to be written to the `/outputs` directory 19 | 20 | The reason for the names lies in the artifact parsing mechanism. 21 | If the package is named differently, the artifact parsing mechanism is not able 22 | to recognize the package and might fault, which causes butido to stop running. 23 | -------------------------------------------------------------------------------- /examples/packages/Makefile: -------------------------------------------------------------------------------- 1 | export BUTIDO_RELEASES="/tmp/butido-test-releases" 2 | export BUTIDO_STAGING="/tmp/butido-test-staging" 3 | export BUTIDO_SOURCE_CACHE="/tmp/butido-test-sources" 4 | export BUTIDO_LOG_DIR="/tmp/butido-test-logs" 5 | export BUTIDO_REPO="/tmp/butido-test-repo" 6 | 7 | .PHONY: all 8 | all: directories copyrepo copysrc 9 | 10 | directories: ${BUTIDO_RELEASES} ${BUTIDO_STAGING} ${BUTIDO_SOURCE_CACHE} ${BUTIDO_LOG_DIR} ${BUTIDO_REPO} 11 | 12 | copyrepo: ${BUTIDO_REPO} 13 | cp -rv ./repo/* ${BUTIDO_REPO}/ 14 | cd ${BUTIDO_REPO}/ && git init && git add . 
&& git commit -m init 15 | 16 | copysrc: ${BUTIDO_SOURCE_CACHE} 17 | cp -rv ./sources/* ${BUTIDO_SOURCE_CACHE}/ 18 | 19 | ${BUTIDO_RELEASES}: 20 | mkdir -p "${BUTIDO_RELEASES}/default" 21 | 22 | ${BUTIDO_STAGING}: 23 | mkdir -p "${BUTIDO_STAGING}" 24 | 25 | ${BUTIDO_SOURCE_CACHE}: 26 | mkdir -p "${BUTIDO_SOURCE_CACHE}" 27 | 28 | ${BUTIDO_LOG_DIR}: 29 | mkdir -p "${BUTIDO_LOG_DIR}" 30 | 31 | ${BUTIDO_REPO}: 32 | mkdir -p "${BUTIDO_REPO}" 33 | 34 | 35 | -------------------------------------------------------------------------------- /src/package/phase.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::path::PathBuf; 12 | 13 | use serde::Deserialize; 14 | use serde::Serialize; 15 | 16 | #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] 17 | #[serde(transparent)] 18 | pub struct PhaseName(String); 19 | 20 | impl PhaseName { 21 | pub fn as_str(&self) -> &str { 22 | &self.0 23 | } 24 | } 25 | 26 | #[cfg(test)] 27 | impl From for PhaseName { 28 | fn from(s: String) -> Self { 29 | PhaseName(s) 30 | } 31 | } 32 | 33 | #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] 34 | pub enum Phase { 35 | #[serde(rename = "path")] 36 | Path(PathBuf), 37 | 38 | #[serde(rename = "script")] 39 | Text(String), 40 | } 41 | -------------------------------------------------------------------------------- /src/endpoint/configuration.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available 
under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::Getters; 12 | use typed_builder::TypedBuilder; 13 | 14 | use crate::util::docker::ImageName; 15 | 16 | #[derive(Getters, TypedBuilder)] 17 | pub struct EndpointConfiguration { 18 | #[getset(get = "pub")] 19 | endpoint_name: crate::config::EndpointName, 20 | 21 | #[getset(get = "pub")] 22 | endpoint: crate::config::Endpoint, 23 | 24 | #[getset(get = "pub")] 25 | #[builder(default)] 26 | required_images: Vec, 27 | 28 | #[getset(get = "pub")] 29 | #[builder(default)] 30 | required_docker_versions: Option>, 31 | 32 | #[getset(get = "pub")] 33 | #[builder(default)] 34 | required_docker_api_versions: Option>, 35 | } 36 | -------------------------------------------------------------------------------- /src/util/progress.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::CopyGetters; 12 | use indicatif::*; 13 | 14 | #[derive(Clone, Debug, CopyGetters)] 15 | pub struct ProgressBars { 16 | bar_template: String, 17 | 18 | #[getset(get_copy = "pub")] 19 | hide: bool, 20 | } 21 | 22 | impl ProgressBars { 23 | pub fn setup(bar_template: String, hide: bool) -> Self { 24 | ProgressBars { bar_template, hide } 25 | } 26 | 27 | pub fn bar(&self) -> anyhow::Result { 28 | if self.hide { 29 | Ok(ProgressBar::hidden()) 30 | } else { 31 | let b = ProgressBar::new(1); 32 | b.set_style(ProgressStyle::default_bar().template(&self.bar_template)?); 33 | Ok(b) 34 | } 35 | } 36 | } 37 | 
-------------------------------------------------------------------------------- /src/repository/fs/element.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::collections::HashMap; 12 | 13 | use crate::repository::fs::path::PathComponent; 14 | 15 | /// One element in the tree inside FileSystemRepresentation 16 | /// 17 | /// This is either a File, or a Directory that contains more (Files or Directories). 18 | #[derive(Debug)] 19 | pub enum Element { 20 | File(String), 21 | Dir(HashMap), 22 | } 23 | 24 | impl Element { 25 | /// Helper fn to get the directory contents of the element, if the element is an Element::Dir 26 | pub fn get_map_mut(&mut self) -> Option<&mut HashMap> { 27 | match self { 28 | Element::File(_) => None, 29 | Element::Dir(hm) => Some(hm), 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | mod build; 12 | pub use build::build; 13 | 14 | mod db; 15 | pub use db::db; 16 | 17 | mod endpoint; 18 | pub use endpoint::endpoint; 19 | pub(super) mod endpoint_container; 20 | 21 | mod env_of; 22 | pub use env_of::env_of; 23 | 24 | mod find_artifact; 25 | pub use 
find_artifact::find_artifact; 26 | 27 | mod find_pkg; 28 | pub use find_pkg::find_pkg; 29 | 30 | mod dependencies_of; 31 | pub use dependencies_of::dependencies_of; 32 | 33 | mod lint; 34 | pub use lint::lint; 35 | 36 | mod what_depends; 37 | pub use what_depends::what_depends; 38 | 39 | mod release; 40 | pub use release::release; 41 | 42 | mod source; 43 | pub use source::source; 44 | 45 | mod versions_of; 46 | pub use versions_of::versions_of; 47 | 48 | mod tree_of; 49 | pub use tree_of::tree_of; 50 | 51 | mod metrics; 52 | pub use metrics::metrics; 53 | 54 | mod util; 55 | -------------------------------------------------------------------------------- /src/config/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! The configuration handling code 12 | //! 13 | //! This module contains all code for the configuration of butido itself. 14 | //! 15 | //! Please note that the `not_validated` module is the "entry point". 16 | //! A "NotValidatedConfiguration" is loaded from the filesystem and then transformed into a 17 | //! `Configuration` object via the `validate()` method. 18 | //! 19 | //! This mechanism is chosen because we might want to be able to do validation on the configuration 20 | //! that is not possible to do with TOML itself. 21 | //! 
22 | 23 | mod configuration; 24 | pub use configuration::*; 25 | 26 | mod container_config; 27 | pub use container_config::*; 28 | 29 | mod docker_config; 30 | pub use docker_config::*; 31 | 32 | mod endpoint_config; 33 | pub use endpoint_config::*; 34 | 35 | mod not_validated; 36 | pub use not_validated::*; 37 | 38 | mod util; 39 | -------------------------------------------------------------------------------- /src/log/util.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use shiplift::tty::TtyChunk; 12 | 13 | #[allow(clippy::enum_variant_names)] 14 | pub enum TtyChunkBuf { 15 | StdIn(Vec), 16 | StdOut(Vec), 17 | StdErr(Vec), 18 | } 19 | 20 | impl From for TtyChunkBuf { 21 | fn from(c: TtyChunk) -> Self { 22 | match c { 23 | TtyChunk::StdIn(buffer) => TtyChunkBuf::StdIn(buffer), 24 | TtyChunk::StdOut(buffer) => TtyChunkBuf::StdOut(buffer), 25 | TtyChunk::StdErr(buffer) => TtyChunkBuf::StdErr(buffer), 26 | } 27 | } 28 | } 29 | 30 | impl AsRef<[u8]> for TtyChunkBuf { 31 | fn as_ref(&self) -> &[u8] { 32 | match self { 33 | TtyChunkBuf::StdIn(buffer) => buffer.as_ref(), 34 | TtyChunkBuf::StdOut(buffer) => buffer.as_ref(), 35 | TtyChunkBuf::StdErr(buffer) => buffer.as_ref(), 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /.gitlint: -------------------------------------------------------------------------------- 1 | # Documentation: https://jorisroovers.com/gitlint/configuration/ 2 | 3 | [general] 4 | # Make body messages optional: 5 | ignore=body-is-missing 6 | # At this time, regex-style-search is disabled by default, but it will 
be 7 | # enabled by default in the future. We already enable it here to avoid a 8 | # warning message (our regular expressions are compatible with re.search()): 9 | regex-style-search=true 10 | # Don't ignore temporary commits (they can be useful for drafts but we shall 11 | # not accidentally merge them (this is mainly important for our CI checks)): 12 | ignore-fixup-commits=false 13 | ignore-fixup-amend-commits=false 14 | ignore-squash-commits=false 15 | 16 | # Enable community contributed rules 17 | # See http://jorisroovers.github.io/gitlint/contrib_rules for details 18 | contrib=contrib-body-requires-signed-off-by,contrib-disallow-cleanup-commits 19 | 20 | [ignore-by-author-name] 21 | # Ignore certain rules for commits of which the author name matches a regex 22 | # Match commits made by dependabot: 23 | regex=(.*)dependabot(.*) 24 | 25 | [ignore-body-lines] 26 | # Ignore long hyperlinks (http and https). The URLs must start at the beginning 27 | # of a line or use the Markdown format (e.g. 
"[10]: $URL"): 28 | regex=^(\[[0-9]+\]: )?https?:// 29 | -------------------------------------------------------------------------------- /src/util/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::io::IsTerminal; 12 | 13 | use serde::Deserialize; 14 | use serde::Serialize; 15 | 16 | #[derive( 17 | parse_display::Display, 18 | Serialize, 19 | Deserialize, 20 | Clone, 21 | Debug, 22 | Hash, 23 | Eq, 24 | PartialEq, 25 | Ord, 26 | PartialOrd, 27 | )] 28 | #[serde(transparent)] 29 | #[display("{0}")] 30 | pub struct EnvironmentVariableName(String); 31 | 32 | impl From<&str> for EnvironmentVariableName { 33 | fn from(s: &str) -> EnvironmentVariableName { 34 | EnvironmentVariableName(s.to_string()) 35 | } 36 | } 37 | 38 | impl AsRef for EnvironmentVariableName { 39 | fn as_ref(&self) -> &str { 40 | self.0.as_ref() 41 | } 42 | } 43 | 44 | pub mod docker; 45 | pub mod env; 46 | pub mod filters; 47 | pub mod git; 48 | pub mod parser; 49 | pub mod progress; 50 | 51 | pub fn stdout_is_pipe() -> bool { 52 | !std::io::stdout().is_terminal() 53 | } 54 | -------------------------------------------------------------------------------- /src/orchestrator/util.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use 
std::collections::HashMap; 12 | 13 | use anyhow::Error; 14 | use uuid::Uuid; 15 | 16 | /// Get a `Display`able interface for a Map of errors 17 | /// 18 | /// This is a helper trait for be able to display a `HashMap` 19 | /// in a `tracing::trace!()` call, for example 20 | pub trait AsReceivedErrorDisplay { 21 | fn display_error_map(&self) -> ReceivedErrorDisplay<'_>; 22 | } 23 | 24 | impl AsReceivedErrorDisplay for HashMap { 25 | fn display_error_map(&self) -> ReceivedErrorDisplay<'_> { 26 | ReceivedErrorDisplay(self) 27 | } 28 | } 29 | 30 | pub struct ReceivedErrorDisplay<'a>(&'a HashMap); 31 | 32 | impl std::fmt::Display for ReceivedErrorDisplay<'_> { 33 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 34 | self.0 35 | .iter() 36 | .try_for_each(|(uuid, err)| writeln!(f, "{uuid}: {err}")) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /migrations/00000000000000_diesel_initial_setup/up.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 
4 | 5 | 6 | 7 | 8 | -- Sets up a trigger for the given table to automatically set a column called 9 | -- `updated_at` whenever the row is modified (unless `updated_at` was included 10 | -- in the modified columns) 11 | -- 12 | -- # Example 13 | -- 14 | -- ```sql 15 | -- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW()); 16 | -- 17 | -- SELECT diesel_manage_updated_at('users'); 18 | -- ``` 19 | CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$ 20 | BEGIN 21 | EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s 22 | FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl); 23 | END; 24 | $$ LANGUAGE plpgsql; 25 | 26 | CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$ 27 | BEGIN 28 | IF ( 29 | NEW IS DISTINCT FROM OLD AND 30 | NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at 31 | ) THEN 32 | NEW.updated_at := current_timestamp; 33 | END IF; 34 | RETURN NEW; 35 | END; 36 | $$ LANGUAGE plpgsql; 37 | -------------------------------------------------------------------------------- /src/filestore/release.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::fmt::Debug; 12 | 13 | use anyhow::Result; 14 | use indicatif::ProgressBar; 15 | 16 | use crate::filestore::path::ArtifactPath; 17 | use crate::filestore::path::StoreRoot; 18 | use crate::filestore::util::FileStoreImpl; 19 | 20 | // The implementation of this type must be available in the merged filestore. 
21 | pub struct ReleaseStore(pub(in crate::filestore) FileStoreImpl); 22 | 23 | impl Debug for ReleaseStore { 24 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> { 25 | write!(f, "ReleaseStore(root: {})", self.0.root_path().display()) 26 | } 27 | } 28 | 29 | impl ReleaseStore { 30 | pub fn load(root: StoreRoot, progress: &ProgressBar) -> Result { 31 | FileStoreImpl::load(root, progress).map(ReleaseStore) 32 | } 33 | 34 | pub fn root_path(&self) -> &StoreRoot { 35 | self.0.root_path() 36 | } 37 | 38 | pub fn get(&self, p: &ArtifactPath) -> Option<&ArtifactPath> { 39 | self.0.get(p) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/db/models/job_env.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Result; 12 | use diesel::prelude::*; 13 | 14 | use crate::db::models::EnvVar; 15 | use crate::db::models::Job; 16 | use crate::schema::job_envs; 17 | 18 | #[derive(Identifiable, Queryable, Associations)] 19 | #[diesel(belongs_to(Job))] 20 | #[diesel(belongs_to(EnvVar, foreign_key = env_id))] 21 | #[diesel(table_name = job_envs)] 22 | pub struct JobEnv { 23 | pub id: i32, 24 | pub job_id: i32, 25 | pub env_id: i32, 26 | } 27 | 28 | #[derive(Insertable)] 29 | #[diesel(table_name = job_envs)] 30 | struct NewJobEnv { 31 | pub job_id: i32, 32 | pub env_id: i32, 33 | } 34 | 35 | impl JobEnv { 36 | pub fn create(database_connection: &mut PgConnection, job: &Job, env: &EnvVar) -> Result<()> { 37 | let new_jobenv = NewJobEnv { 38 | job_id: job.id, 39 | env_id: env.id, 40 | }; 41 | 42 | 
diesel::insert_into(job_envs::table) 43 | .values(&new_jobenv) 44 | .execute(database_connection)?; 45 | Ok(()) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/config/container_config.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::CopyGetters; 12 | use getset::Getters; 13 | use serde::Deserialize; 14 | 15 | use crate::util::EnvironmentVariableName; 16 | 17 | /// The configuration for the containers 18 | #[derive(Debug, CopyGetters, Getters, Deserialize)] 19 | #[serde(deny_unknown_fields)] 20 | pub struct ContainerConfig { 21 | /// Whether to check if environment variables are allowed (i.e., if their 22 | /// names are listed in `allowed_env`). 
23 | #[getset(get_copy = "pub")] 24 | check_env_names: bool, 25 | 26 | /// Allowed environment variables (names) 27 | #[getset(get = "pub")] 28 | allowed_env: Vec, 29 | 30 | /// Pass the current Git author to the container 31 | /// This can be used for the "packager" name in a package, for example 32 | #[getset(get = "pub")] 33 | git_author: Option, 34 | 35 | /// Pass the current Git hash to the container 36 | #[getset(get = "pub")] 37 | git_commit_hash: Option, 38 | } 39 | -------------------------------------------------------------------------------- /examples/packages/repo/pkg.toml: -------------------------------------------------------------------------------- 1 | version_is_semver = false 2 | patches = [] 3 | 4 | [dependencies] 5 | build = [] 6 | runtime = [] 7 | 8 | [sources.src] 9 | hash.type = "sha1" 10 | 11 | [phases] 12 | 13 | sourcecheck.script = ''' 14 | filename="/inputs/src.source" 15 | [[ -e $filename ]] || { 16 | echo "MISSING: $filename" 17 | {{state "ERR" "Missing input"}} 18 | exit 1 19 | } 20 | ''' 21 | 22 | patchcheck.script = ''' 23 | {{#if this.patches[0]}} 24 | {{#each this.patches}} 25 | if [[ ! 
-e "/patches/{{this}}" ]]; then 26 | echo "Does not exist: /patches/{{this}}" 27 | {{state "ERR" "Missing patch"}} 28 | exit 1 29 | fi 30 | {{/each}} 31 | {{/if}} 32 | ''' 33 | 34 | depcheck.script = ''' 35 | {{#each this.dependencies.runtime}} 36 | # Try to find sha of dependency {{this}} 37 | exp_sha="$(echo {{this}} | sed 's,.*\ =,,' | sha1sum | sed 's,\ \ -,,')" 38 | sha1sum /inputs/*pkg | grep "$exp_sha" || { 39 | echo "FAILED TO FIND SHA: $exp_sha" 40 | {{state "ERR" "Failed to find SHA"}} 41 | exit 1 42 | } 43 | {{/each}} 44 | ''' 45 | 46 | build.script = ''' 47 | mkdir /outputs 48 | echo "{{this.version}}" > /outputs/{{this.name}}-{{this.version}}.pkg 49 | 50 | {{state "OK"}} 51 | ''' 52 | -------------------------------------------------------------------------------- /src/job/resource.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use crate::filestore::ArtifactPath; 12 | use crate::util::EnvironmentVariableName; 13 | 14 | #[derive(Clone, Debug)] 15 | pub enum JobResource { 16 | Environment(EnvironmentVariableName, String), 17 | Artifact(ArtifactPath), 18 | } 19 | 20 | impl From<(EnvironmentVariableName, String)> for JobResource { 21 | fn from(tpl: (EnvironmentVariableName, String)) -> Self { 22 | JobResource::Environment(tpl.0, tpl.1) 23 | } 24 | } 25 | 26 | impl From for JobResource { 27 | fn from(a: ArtifactPath) -> Self { 28 | JobResource::Artifact(a) 29 | } 30 | } 31 | 32 | impl JobResource { 33 | pub fn env(&self) -> Option<(&EnvironmentVariableName, &String)> { 34 | match self { 35 | JobResource::Environment(k, v) => Some((k, v)), 36 | _ => None, 37 | } 38 | } 39 | pub 
fn artifact(&self) -> Option<&ArtifactPath> { 40 | match self { 41 | JobResource::Artifact(a) => Some(a), 42 | _ => None, 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /examples/packages/repo/config.toml: -------------------------------------------------------------------------------- 1 | # Example configuration file for butido 2 | compatibility = 1 3 | script_highlight_theme = "Solarized (dark)" 4 | 5 | releases_root = "/tmp/butido-test-releases" 6 | release_stores = [ "default" ] 7 | staging = "/tmp/butido-test-staging" 8 | source_cache = "/tmp/butido-test-sources" 9 | log_dir = "/tmp/butido-test-logs" 10 | 11 | 12 | strict_script_interpolation = true 13 | 14 | 15 | # 16 | # 17 | # Log database configuration 18 | # 19 | # 20 | 21 | # Database configuration should be self-explanatory 22 | database_host = "localhost" 23 | database_port = 5432 24 | database_user = "pgdev" 25 | database_password = "password" 26 | database_name = "butido" 27 | 28 | available_phases = [ 29 | "sourcecheck", 30 | "patchcheck", 31 | "depcheck", 32 | "build" 33 | ] 34 | 35 | 36 | [docker] 37 | 38 | # Images which can be used to build 39 | # images not listed here are automatically rejected 40 | images = [ 41 | { name = "debian:bullseye", short_name = "deb11" }, 42 | ] 43 | 44 | # 45 | # Docker endpoints 46 | # 47 | [docker.endpoints.testhostname] 48 | uri = "http://0.0.0.0:8095" # the URI of the endpoint. 
Either http or socket path 49 | endpoint_type = "http" # either "http" or "socket" 50 | maxjobs = 1 # Maximum number of jobs which are allowed on this endpoint 51 | 52 | 53 | [containers] 54 | check_env_names = true 55 | allowed_env = [ ] 56 | 57 | -------------------------------------------------------------------------------- /src/package/name.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::ops::Deref; 12 | 13 | use pom::parser::Parser as PomParser; 14 | use serde::Deserialize; 15 | use serde::Serialize; 16 | 17 | #[derive( 18 | parse_display::Display, 19 | Serialize, 20 | Deserialize, 21 | Clone, 22 | Debug, 23 | Hash, 24 | Eq, 25 | PartialEq, 26 | Ord, 27 | PartialOrd, 28 | )] 29 | #[serde(transparent)] 30 | #[display("{0}")] 31 | pub struct PackageName(String); 32 | 33 | impl Deref for PackageName { 34 | type Target = String; 35 | fn deref(&self) -> &Self::Target { 36 | &self.0 37 | } 38 | } 39 | 40 | impl AsRef for PackageName { 41 | fn as_ref(&self) -> &str { 42 | &self.0 43 | } 44 | } 45 | 46 | impl From for PackageName { 47 | fn from(s: String) -> Self { 48 | PackageName(s) 49 | } 50 | } 51 | 52 | impl PackageName { 53 | pub fn parser<'a>() -> PomParser<'a, u8, Self> { 54 | use crate::util::parser::*; 55 | (letters() + ((letters() | numbers()).repeat(0..))) 56 | .collect() 57 | .convert(|b| String::from_utf8(b.to_vec()).map(Self::from)) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/commands/versions_of.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 
(c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! Implementation of the 'versions_of' subcommand 12 | 13 | use anyhow::Error; 14 | use anyhow::Result; 15 | use clap::ArgMatches; 16 | use tracing::trace; 17 | 18 | use crate::package::PackageName; 19 | use crate::repository::Repository; 20 | 21 | /// Implementation of the "versions_of" subcommand 22 | pub async fn versions_of(matches: &ArgMatches, repo: Repository) -> Result<()> { 23 | use filters::filter::Filter; 24 | use std::io::Write; 25 | 26 | let package_filter = { 27 | let name = matches 28 | .get_one::("package_name") 29 | .map(|s| s.to_owned()) 30 | .map(PackageName::from) 31 | .unwrap(); 32 | trace!("Checking for package with name = {}", name); 33 | 34 | crate::util::filters::build_package_filter_by_name(name) 35 | }; 36 | 37 | let mut stdout = std::io::stdout(); 38 | repo.packages() 39 | .filter(|package| package_filter.filter(package)) 40 | .inspect(|pkg| trace!("Found package: {:?}", pkg)) 41 | .map(|pkg| writeln!(stdout, "{}", pkg.version()).map_err(Error::from)) 42 | .collect::>>() 43 | .map(|_| ()) 44 | } 45 | -------------------------------------------------------------------------------- /src/repository/pkg_toml_source.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020-2024 science+computing ag and other contributors 2 | // 3 | // This program and the accompanying materials are made 4 | // available under the terms of the Eclipse Public License 2.0 5 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 6 | // 7 | // SPDX-License-Identifier: EPL-2.0 8 | 9 | // A custom `Source` implementation for the `config` crate to tack the `pkg.toml` file path as 
URI/origin 10 | // in addition to the content (basically a replacement for `config::File::from_str(str, format)`). 11 | 12 | use std::path::Path; 13 | 14 | use config::ConfigError; 15 | use config::FileFormat; 16 | use config::Format; 17 | use config::Map; 18 | use config::Source; 19 | use config::Value; 20 | 21 | #[derive(Clone, Debug)] 22 | pub struct PkgTomlSource { 23 | content: String, 24 | uri: String, 25 | } 26 | 27 | impl PkgTomlSource { 28 | pub fn new(path: &Path, content: String) -> Self { 29 | // We could also use `path.to_str()` for proper error handling: 30 | let path = path.to_string_lossy().to_string(); 31 | PkgTomlSource { content, uri: path } 32 | } 33 | } 34 | 35 | impl Source for PkgTomlSource { 36 | fn clone_into_box(&self) -> Box { 37 | Box::new((*self).clone()) 38 | } 39 | 40 | fn collect(&self) -> Result, ConfigError> { 41 | FileFormat::Toml 42 | .parse(Some(&self.uri), &self.content) 43 | .map_err(|cause| ConfigError::FileParse { 44 | uri: Some(self.uri.clone()), 45 | cause, 46 | }) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/db/models/release_store.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use anyhow::Result; 13 | use diesel::Connection; 14 | use diesel::ExpressionMethods; 15 | use diesel::PgConnection; 16 | use diesel::QueryDsl; 17 | use diesel::RunQueryDsl; 18 | 19 | use crate::schema; 20 | use crate::schema::release_stores; 21 | 22 | #[derive(Debug, Identifiable, Queryable)] 23 | #[diesel(table_name = release_stores)] 24 | pub struct ReleaseStore { 25 | pub id: i32, 26 
| pub store_name: String, 27 | } 28 | 29 | #[derive(Insertable)] 30 | #[diesel(table_name = release_stores)] 31 | struct NewReleaseStore<'a> { 32 | pub store_name: &'a str, 33 | } 34 | 35 | impl ReleaseStore { 36 | pub fn create(database_connection: &mut PgConnection, name: &str) -> Result { 37 | let new_relstore = NewReleaseStore { store_name: name }; 38 | 39 | database_connection.transaction::<_, Error, _>(|conn| { 40 | diesel::insert_into(schema::release_stores::table) 41 | .values(&new_relstore) 42 | .on_conflict_do_nothing() 43 | .execute(conn)?; 44 | 45 | schema::release_stores::table 46 | .filter(schema::release_stores::store_name.eq(name)) 47 | .first::(conn) 48 | .map_err(Error::from) 49 | }) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/job/job.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::Getters; 12 | use uuid::Uuid; 13 | 14 | use crate::job::JobResource; 15 | use crate::package::Package; 16 | use crate::package::PhaseName; 17 | use crate::package::Shebang; 18 | use crate::util::docker::ImageName; 19 | 20 | /// A prepared, but not necessarily runnable, job configuration 21 | #[derive(Debug, Getters)] 22 | pub struct Job { 23 | /// A unique name for the job, not necessarily human-readable 24 | #[getset(get = "pub")] 25 | uuid: Uuid, 26 | 27 | #[getset(get = "pub")] 28 | package: Package, 29 | 30 | #[getset(get = "pub")] 31 | image: ImageName, 32 | 33 | #[getset(get = "pub")] 34 | script_shebang: Shebang, 35 | 36 | #[getset(get = "pub")] 37 | script_phases: Vec, 38 | 39 | #[getset(get = "pub")] 40 | 
resources: Vec, 41 | } 42 | 43 | impl Job { 44 | pub fn new( 45 | pkg: Package, 46 | script_shebang: Shebang, 47 | image: ImageName, 48 | phases: Vec, 49 | resources: Vec, 50 | ) -> Self { 51 | let uuid = Uuid::new_v4(); 52 | 53 | Job { 54 | uuid, 55 | package: pkg, 56 | image, 57 | script_shebang, 58 | script_phases: phases, 59 | resources, 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/db/models/envvar.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use anyhow::Result; 13 | use diesel::prelude::*; 14 | 15 | use crate::schema::envvars; 16 | use crate::schema::envvars::*; 17 | use crate::util::EnvironmentVariableName; 18 | 19 | #[derive(Debug, Identifiable, Queryable)] 20 | #[diesel(table_name = envvars)] 21 | pub struct EnvVar { 22 | pub id: i32, 23 | pub name: String, 24 | pub value: String, 25 | } 26 | 27 | #[derive(Insertable)] 28 | #[diesel(table_name = envvars)] 29 | struct NewEnvVar<'a> { 30 | pub name: &'a str, 31 | pub value: &'a str, 32 | } 33 | 34 | impl EnvVar { 35 | pub fn create_or_fetch( 36 | database_connection: &mut PgConnection, 37 | k: &EnvironmentVariableName, 38 | v: &str, 39 | ) -> Result { 40 | let new_envvar = NewEnvVar { 41 | name: k.as_ref(), 42 | value: v, 43 | }; 44 | 45 | database_connection.transaction::<_, Error, _>(|conn| { 46 | diesel::insert_into(envvars::table) 47 | .values(&new_envvar) 48 | .on_conflict_do_nothing() 49 | .execute(conn)?; 50 | 51 | dsl::envvars 52 | .filter(name.eq(k.as_ref()).and(value.eq(v))) 53 | .first::(conn) 54 | .map_err(Error::from) 55 | }) 
56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/db/models/githash.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Context; 12 | use anyhow::Error; 13 | use anyhow::Result; 14 | use diesel::prelude::*; 15 | 16 | use crate::schema::githashes; 17 | use crate::schema::githashes::*; 18 | 19 | #[derive(Queryable)] 20 | pub struct GitHash { 21 | pub id: i32, 22 | pub hash: String, 23 | } 24 | 25 | #[derive(Insertable)] 26 | #[diesel(table_name = githashes)] 27 | struct NewGitHash<'a> { 28 | pub hash: &'a str, 29 | } 30 | 31 | impl GitHash { 32 | pub fn create_or_fetch( 33 | database_connection: &mut PgConnection, 34 | githash: &str, 35 | ) -> Result { 36 | let new_hash = NewGitHash { hash: githash }; 37 | 38 | database_connection.transaction::<_, Error, _>(|conn| { 39 | diesel::insert_into(githashes::table) 40 | .values(&new_hash) 41 | .on_conflict_do_nothing() 42 | .execute(conn)?; 43 | 44 | dsl::githashes 45 | .filter(hash.eq(githash)) 46 | .first::(conn) 47 | .map_err(Error::from) 48 | }) 49 | } 50 | 51 | pub fn with_id(database_connection: &mut PgConnection, git_hash_id: i32) -> Result { 52 | dsl::githashes 53 | .find(git_hash_id) 54 | .first::<_>(database_connection) 55 | .context("Loading GitHash") 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/util/parser.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the 
accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use pom::parser::Parser as PomParser; 12 | use pom::parser::*; 13 | 14 | pub fn numbers<'a>() -> PomParser<'a, u8, Vec> { 15 | one_of(b"0123456789").repeat(1..) 16 | } 17 | 18 | pub fn letters<'a>() -> PomParser<'a, u8, Vec> { 19 | pom::parser::is_a(pom::char_class::alpha).repeat(1..) 20 | } 21 | 22 | pub fn dash<'a>() -> PomParser<'a, u8, Vec> { 23 | sym(b'-').map(|b| vec![b]) 24 | } 25 | 26 | pub fn under<'a>() -> PomParser<'a, u8, Vec> { 27 | sym(b'_').map(|b| vec![b]) 28 | } 29 | 30 | pub fn dot<'a>() -> PomParser<'a, u8, Vec> { 31 | sym(b'.').map(|b| vec![b]) 32 | } 33 | 34 | pub fn equal<'a>() -> PomParser<'a, u8, Vec> { 35 | sym(b'=').map(|b| vec![b]) 36 | } 37 | 38 | pub fn nonempty_string_with_optional_quotes<'a>() -> Parser<'a, u8, String> { 39 | let special_char = || { 40 | sym(b'\\') 41 | | sym(b'/') 42 | | sym(b'"') 43 | | sym(b'b').map(|_| b'\x08') 44 | | sym(b'f').map(|_| b'\x0C') 45 | | sym(b'n').map(|_| b'\n') 46 | | sym(b'r').map(|_| b'\r') 47 | | sym(b't').map(|_| b'\t') 48 | }; 49 | let escape_sequence = || sym(b'\\') * special_char(); 50 | 51 | let inner_string = || (none_of(b"\\\"") | escape_sequence()).repeat(1..); 52 | 53 | let string = (sym(b'"') * inner_string() - sym(b'"')) | inner_string(); 54 | string.convert(String::from_utf8) 55 | } 56 | -------------------------------------------------------------------------------- /src/commands/lint.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | 
// SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! Implementation of the 'lint' subcommand 12 | 13 | use std::path::Path; 14 | 15 | use anyhow::anyhow; 16 | use anyhow::Result; 17 | use clap::ArgMatches; 18 | 19 | use crate::config::*; 20 | use crate::package::PackageName; 21 | use crate::package::PackageVersionConstraint; 22 | use crate::repository::Repository; 23 | use crate::util::progress::ProgressBars; 24 | 25 | /// Implementation of the "lint" subcommand 26 | pub async fn lint( 27 | repo_path: &Path, 28 | matches: &ArgMatches, 29 | progressbars: ProgressBars, 30 | config: &Configuration, 31 | repo: Repository, 32 | ) -> Result<()> { 33 | let linter = crate::ui::find_linter_command(repo_path, config)? 34 | .ok_or_else(|| anyhow!("No linter command found"))?; 35 | let pname = matches 36 | .get_one::("package_name") 37 | .map(|s| s.to_owned()) 38 | .map(PackageName::from); 39 | let pvers = matches 40 | .get_one::("package_version_constraint") 41 | .map(|s| s.to_owned()) 42 | .map(PackageVersionConstraint::try_from) 43 | .transpose()?; 44 | 45 | let bar = progressbars.bar()?; 46 | bar.set_message("Linting package scripts..."); 47 | 48 | let iter = repo 49 | .packages() 50 | .filter(|p| pname.as_ref().map(|n| p.name() == n).unwrap_or(true)) 51 | .filter(|p| { 52 | pvers 53 | .as_ref() 54 | .map(|v| v.matches(p.version())) 55 | .unwrap_or(true) 56 | }); 57 | 58 | crate::commands::util::lint_packages(iter, &linter, config, bar).await 59 | } 60 | -------------------------------------------------------------------------------- /src/config/docker_config.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 
| // 10 | 11 | use std::collections::HashMap; 12 | 13 | use getset::{CopyGetters, Getters}; 14 | use serde::Deserialize; 15 | 16 | use crate::config::Endpoint; 17 | use crate::config::EndpointName; 18 | use crate::util::docker::ContainerImage; 19 | 20 | /// Configuration of the Docker daemon interfacing functionality 21 | #[derive(Debug, Getters, CopyGetters, Deserialize)] 22 | #[serde(deny_unknown_fields)] 23 | pub struct DockerConfig { 24 | /// The required Docker version 25 | /// 26 | /// If not set, it will not be checked, which might result in weird things? 27 | /// 28 | /// # Note 29 | /// 30 | /// Because the Docker API returns strings, not a version object, each compatible version must 31 | /// be listed. 32 | #[getset(get = "pub")] 33 | docker_versions: Option>, 34 | 35 | /// The required Docker API version 36 | /// 37 | /// If not set, it will not be checked, which might result in weird things? 38 | /// 39 | /// # Note 40 | /// 41 | /// Because the Docker API returns strings, not a version object, each compatible version must 42 | /// be listed. 43 | #[getset(get = "pub")] 44 | docker_api_versions: Option>, 45 | 46 | /// List of container images that are allowed for builds. 
47 | /// An example: `{ name = "local:debian12-default", short_name ="deb12" }` 48 | #[getset(get = "pub")] 49 | images: Vec, 50 | 51 | /// A map of endpoints (name -> settings) that are used as container hosts to run builds on 52 | #[getset(get = "pub")] 53 | endpoints: HashMap, 54 | } 55 | -------------------------------------------------------------------------------- /src/db/models/releases.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use anyhow::Result; 13 | use chrono::NaiveDateTime; 14 | use diesel::prelude::*; 15 | 16 | use crate::db::models::Artifact; 17 | use crate::db::models::ReleaseStore; 18 | use crate::schema::releases; 19 | use crate::schema::releases::*; 20 | 21 | #[derive(Debug, Identifiable, Queryable, Associations)] 22 | #[diesel(belongs_to(Artifact))] 23 | #[diesel(belongs_to(ReleaseStore))] 24 | pub struct Release { 25 | pub id: i32, 26 | pub artifact_id: i32, 27 | pub release_date: NaiveDateTime, 28 | pub release_store_id: i32, 29 | } 30 | 31 | #[derive(Insertable)] 32 | #[diesel(table_name = releases)] 33 | struct NewRelease<'a> { 34 | pub artifact_id: i32, 35 | pub release_date: &'a NaiveDateTime, 36 | pub release_store_id: i32, 37 | } 38 | 39 | impl Release { 40 | pub fn create<'a>( 41 | database_connection: &mut PgConnection, 42 | art: &Artifact, 43 | date: &'a NaiveDateTime, 44 | store: &'a ReleaseStore, 45 | ) -> Result { 46 | let new_rel = NewRelease { 47 | artifact_id: art.id, 48 | release_date: date, 49 | release_store_id: store.id, 50 | }; 51 | 52 | database_connection.transaction::<_, Error, _>(|conn| { 53 | 
diesel::insert_into(releases::table) 54 | .values(&new_rel) 55 | .execute(conn)?; 56 | 57 | dsl::releases 58 | .filter(artifact_id.eq(art.id).and(release_date.eq(date))) 59 | .first::(conn) 60 | .map_err(Error::from) 61 | }) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/log/item.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use anyhow::Result; 13 | use colored::Colorize; 14 | 15 | #[derive(Debug, PartialEq, Eq, Hash)] 16 | pub enum LogItem { 17 | /// A line from the log, unmodified 18 | Line(Vec), 19 | 20 | /// A progress report 21 | Progress(usize), 22 | 23 | /// The name of the current phase the process is in 24 | CurrentPhase(String), 25 | 26 | /// The end-state of the process 27 | /// Either Ok or Error 28 | State(Result<(), String>), 29 | } 30 | 31 | impl LogItem { 32 | pub fn display(&self) -> Result { 33 | match self { 34 | LogItem::Line(s) => Ok(Display(String::from_utf8(s.to_vec())?.normal())), 35 | LogItem::Progress(u) => Ok(Display(format!("#BUTIDO:PROGRESS:{u}").cyan())), 36 | LogItem::CurrentPhase(p) => Ok(Display(format!("#BUTIDO:PHASE:{p}").cyan())), 37 | LogItem::State(Ok(())) => Ok(Display("#BUTIDO:STATE:OK".to_string().green())), 38 | LogItem::State(Err(s)) => Ok(Display(format!("#BUTIDO:STATE:ERR:{s}").red())), 39 | } 40 | } 41 | 42 | pub fn raw(&self) -> Result { 43 | match self { 44 | LogItem::Line(s) => String::from_utf8(s.to_vec()).map_err(Error::from), 45 | LogItem::Progress(u) => Ok(format!("#BUTIDO:PROGRESS:{u}")), 46 | LogItem::CurrentPhase(p) => 
Ok(format!("#BUTIDO:PHASE:{p}")), 47 | LogItem::State(Ok(())) => Ok("#BUTIDO:STATE:OK".to_string()), 48 | LogItem::State(Err(s)) => Ok(format!("#BUTIDO:STATE:ERR:{s}")), 49 | } 50 | } 51 | } 52 | 53 | #[derive(parse_display::Display)] 54 | #[display("{0}")] 55 | pub struct Display(colored::ColoredString); 56 | -------------------------------------------------------------------------------- /src/filestore/util.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! Module containing utilities for the filestore implementation 12 | //! 13 | 14 | use std::collections::HashSet; 15 | 16 | use anyhow::Result; 17 | use indicatif::ProgressBar; 18 | use tracing::trace; 19 | 20 | use crate::filestore::path::ArtifactPath; 21 | use crate::filestore::path::StoreRoot; 22 | 23 | /// The actual filestore implementation 24 | /// 25 | /// Because the "staging" filestore and the "release" filestore function the same underneath, we 26 | /// provide this type as the implementation. 27 | /// 28 | /// It can then be wrapped into the actual interface of this module with specialized functionality. 
29 | #[derive(getset::Getters)] 30 | pub struct FileStoreImpl { 31 | #[getset(get = "pub")] 32 | root_path: StoreRoot, 33 | store: HashSet, 34 | } 35 | 36 | impl FileStoreImpl { 37 | /// Loads the passed path recursively 38 | pub fn load(root_path: StoreRoot, progress: &ProgressBar) -> Result { 39 | let store = root_path 40 | .find_artifacts_recursive() 41 | .inspect(|path| { 42 | trace!("Found artifact path: {:?}", path); 43 | progress.tick(); 44 | }) 45 | .collect::>>()?; 46 | 47 | Ok(FileStoreImpl { root_path, store }) 48 | } 49 | 50 | pub fn get(&self, artifact_path: &ArtifactPath) -> Option<&ArtifactPath> { 51 | self.store.get(artifact_path) 52 | } 53 | 54 | pub(in crate::filestore) fn load_from_path<'a>( 55 | &mut self, 56 | artifact_path: &'a ArtifactPath, 57 | ) -> &'a ArtifactPath { 58 | if !self.store.contains(artifact_path) { 59 | self.store.insert(artifact_path.clone()); 60 | } 61 | artifact_path 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/commands/env_of.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Implementation of the 'env-of' subcommand 12 | 13 | use anyhow::Result; 14 | use clap::ArgMatches; 15 | use tracing::trace; 16 | 17 | use crate::package::PackageName; 18 | use crate::package::PackageVersion; 19 | use crate::repository::Repository; 20 | 21 | /// Implementation of the "env_of" subcommand 22 | pub async fn env_of(matches: &ArgMatches, repo: Repository) -> Result<()> { 23 | use filters::filter::Filter; 24 | use std::io::Write; 25 | 26 | let package_filter = { 27 | let name = matches 28 | .get_one::("package_name") 29 | .map(|s| s.to_owned()) 30 | .map(PackageName::from) 31 | .unwrap(); 32 | let version = matches 33 | .get_one::("package_version") 34 | .map(|s| s.to_owned()) 35 | .map(PackageVersion::try_from) 36 | .unwrap()?; 37 | trace!( 38 | "Checking for package with name = {} and version = {:?}", 39 | name, 40 | version 41 | ); 42 | 43 | crate::util::filters::build_package_filter_by_name(name).and( 44 | crate::util::filters::build_package_filter_by_version(version), 45 | ) 46 | }; 47 | 48 | let mut stdout = std::io::stdout(); 49 | repo.packages() 50 | .filter(|package| package_filter.filter(package)) 51 | .inspect(|pkg| trace!("Found package: {:?}", pkg)) 52 | .try_for_each(|pkg| { 53 | if let Some(hm) = pkg.environment() { 54 | for (key, value) in hm { 55 | writeln!(stdout, "{key} = '{value}'")?; 56 | } 57 | } else { 58 | writeln!(stdout, "No environment")?; 59 | } 60 | 61 | Ok(()) 62 | }) 63 | } 64 | -------------------------------------------------------------------------------- /src/db/models/image.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 
| use anyhow::Result; 13 | use diesel::prelude::*; 14 | 15 | use crate::schema::images; 16 | use crate::schema::images::*; 17 | use crate::util::docker::ImageName; 18 | 19 | #[derive(Identifiable, Queryable)] 20 | pub struct Image { 21 | pub id: i32, 22 | pub name: String, 23 | } 24 | 25 | #[derive(Insertable)] 26 | #[diesel(table_name = images)] 27 | struct NewImage<'a> { 28 | pub name: &'a str, 29 | } 30 | 31 | impl Image { 32 | pub fn create_or_fetch( 33 | database_connection: &mut PgConnection, 34 | image_name: &ImageName, 35 | ) -> Result { 36 | let new_image = NewImage { 37 | name: image_name.as_ref(), 38 | }; 39 | 40 | database_connection.transaction::<_, Error, _>(|conn| { 41 | diesel::insert_into(images::table) 42 | .values(&new_image) 43 | .on_conflict_do_nothing() 44 | .execute(conn)?; 45 | 46 | dsl::images 47 | .filter(name.eq(image_name.as_ref())) 48 | .first::(conn) 49 | .map_err(Error::from) 50 | }) 51 | } 52 | 53 | pub fn fetch_for_job( 54 | database_connection: &mut PgConnection, 55 | j: &crate::db::models::Job, 56 | ) -> Result> { 57 | Self::fetch_by_id(database_connection, j.image_id) 58 | } 59 | 60 | pub fn fetch_by_id(database_connection: &mut PgConnection, iid: i32) -> Result> { 61 | match dsl::images 62 | .filter(id.eq(iid)) 63 | .first::(database_connection) 64 | { 65 | Err(diesel::result::Error::NotFound) => Ok(None), 66 | Err(e) => Err(Error::from(e)), 67 | Ok(i) => Ok(Some(i)), 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/ui/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Utility functions for the UI 12 | 13 | use std::path::Path; 14 | use std::path::PathBuf; 15 | 16 | use anyhow::anyhow; 17 | use anyhow::Result; 18 | use itertools::Itertools; 19 | 20 | use crate::config::Configuration; 21 | use crate::package::Script; 22 | 23 | mod package; 24 | pub use crate::ui::package::*; 25 | 26 | pub fn script_to_printable( 27 | script: &Script, 28 | highlight: bool, 29 | highlight_theme: &str, 30 | line_numbers: bool, 31 | ) -> Result { 32 | let script = if highlight { 33 | let script = script.highlighted(highlight_theme); 34 | if line_numbers { 35 | script 36 | .lines_numbered()? 37 | .map(|(i, s)| format!("{i:>4} | {s}")) 38 | .join("") 39 | } else { 40 | script.lines()?.join("") 41 | } 42 | } else if line_numbers { 43 | script 44 | .lines_numbered() 45 | .map(|(i, s)| format!("{i:>4} | {s}")) 46 | .join("") 47 | } else { 48 | script.to_string() 49 | }; 50 | 51 | Ok(script) 52 | } 53 | 54 | pub fn find_linter_command(repo_path: &Path, config: &Configuration) -> Result> { 55 | match config.script_linter().as_ref() { 56 | None => Ok(None), 57 | Some(linter) => { 58 | if linter.is_absolute() { 59 | Ok(Some(linter.to_path_buf())) 60 | } else { 61 | let linter = repo_path.join(linter); 62 | if !linter.is_file() { 63 | Err(anyhow!( 64 | "Cannot find linter command, searched in: {}", 65 | linter.display() 66 | )) 67 | } else { 68 | Ok(Some(linter)) 69 | } 70 | } 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/db/models/endpoint.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use 
anyhow::Result; 13 | use diesel::prelude::*; 14 | 15 | use crate::config::EndpointName; 16 | use crate::schema::endpoints; 17 | use crate::schema::endpoints::*; 18 | 19 | #[derive(Identifiable, Queryable, Eq, PartialEq)] 20 | pub struct Endpoint { 21 | pub id: i32, 22 | pub name: String, 23 | } 24 | 25 | #[derive(Insertable)] 26 | #[diesel(table_name = endpoints)] 27 | struct NewEndpoint<'a> { 28 | pub name: &'a str, 29 | } 30 | 31 | impl Endpoint { 32 | pub fn create_or_fetch( 33 | database_connection: &mut PgConnection, 34 | ep_name: &EndpointName, 35 | ) -> Result { 36 | let new_ep = NewEndpoint { 37 | name: ep_name.as_ref(), 38 | }; 39 | 40 | database_connection.transaction::<_, Error, _>(|conn| { 41 | diesel::insert_into(endpoints::table) 42 | .values(&new_ep) 43 | .on_conflict_do_nothing() 44 | .execute(conn)?; 45 | 46 | dsl::endpoints 47 | .filter(name.eq(ep_name.as_ref())) 48 | .first::(conn) 49 | .map_err(Error::from) 50 | }) 51 | } 52 | 53 | pub fn fetch_for_job( 54 | database_connection: &mut PgConnection, 55 | j: &crate::db::models::Job, 56 | ) -> Result> { 57 | Self::fetch_by_id(database_connection, j.endpoint_id) 58 | } 59 | 60 | pub fn fetch_by_id( 61 | database_connection: &mut PgConnection, 62 | eid: i32, 63 | ) -> Result> { 64 | match dsl::endpoints 65 | .filter(id.eq(eid)) 66 | .first::(database_connection) 67 | { 68 | Err(diesel::result::Error::NotFound) => Ok(None), 69 | Err(e) => Err(Error::from(e)), 70 | Ok(e) => Ok(Some(e)), 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/job/dag.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // 
SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::Getters; 12 | use petgraph::acyclic::Acyclic; 13 | use petgraph::graph::DiGraph; 14 | use uuid::Uuid; 15 | 16 | use crate::job::Job; 17 | use crate::job::JobResource; 18 | use crate::package::DependencyType; 19 | use crate::package::Package; 20 | use crate::package::PhaseName; 21 | use crate::package::Shebang; 22 | use crate::util::docker::ImageName; 23 | 24 | #[derive(Debug, Getters)] 25 | pub struct Dag { 26 | #[getset(get = "pub")] 27 | dag: Acyclic>, 28 | } 29 | 30 | impl Dag { 31 | pub fn from_package_dag( 32 | dag: crate::package::Dag, 33 | script_shebang: Shebang, 34 | image: ImageName, 35 | phases: Vec, 36 | resources: Vec, 37 | ) -> Self { 38 | let build_job = |_, p: &Package| { 39 | Job::new( 40 | p.clone(), 41 | script_shebang.clone(), 42 | image.clone(), 43 | phases.clone(), 44 | resources.clone(), 45 | ) 46 | }; 47 | 48 | Dag { 49 | dag: Acyclic::<_>::try_from_graph(dag.dag().map(build_job, |_, e| (*e).clone())) 50 | .unwrap(), // The dag.dag() is already acyclic so this cannot fail 51 | } 52 | } 53 | 54 | pub fn iter(&self) -> impl Iterator> { 55 | self.dag.node_indices().map(move |idx| { 56 | let job = self.dag.node_weight(idx).unwrap(); // TODO 57 | let children = self.dag.neighbors_directed(idx, petgraph::Outgoing); 58 | let children_uuids = children 59 | .filter_map(|node_idx| self.dag.node_weight(node_idx)) 60 | .map(Job::uuid) 61 | .cloned() 62 | .collect(); 63 | 64 | JobDefinition { 65 | job, 66 | dependencies: children_uuids, 67 | } 68 | }) 69 | } 70 | } 71 | 72 | #[derive(Debug)] 73 | pub struct JobDefinition<'a> { 74 | pub job: &'a Job, 75 | pub dependencies: Vec, 76 | } 77 | -------------------------------------------------------------------------------- /src/repository/fs/path.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the 
accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::path::Component; 12 | 13 | use anyhow::anyhow; 14 | use anyhow::Result; 15 | 16 | /// Helper type for filtering for paths we need or don't need 17 | /// 18 | /// We either have a directory, which has a name, or we have a pkg.toml file, which is of interest. 19 | /// All other files can be ignored and thus are not represented by this type. 20 | /// 21 | /// The PathComponent::DirName(_) represents a _part_ of a Path. Something like 22 | /// 23 | /// ```ignore 24 | /// let p = PathBuf::from("foo/bar/baz") 25 | /// p.components().map(PathComponent::DirName) // does not actually work because of types 26 | /// ``` 27 | /// 28 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 29 | pub enum PathComponent { 30 | PkgToml, 31 | DirName(String), 32 | } 33 | 34 | impl TryFrom<&std::path::Component<'_>> for PathComponent { 35 | type Error = anyhow::Error; 36 | 37 | fn try_from(c: &std::path::Component) -> Result { 38 | match *c { 39 | Component::Prefix(_) => anyhow::bail!("Unexpected path component: Prefix"), 40 | Component::RootDir => anyhow::bail!("Unexpected path component: RootDir"), 41 | Component::CurDir => anyhow::bail!("Unexpected path component: CurDir"), 42 | Component::ParentDir => anyhow::bail!("Unexpected path component: ParentDir"), 43 | Component::Normal(filename) => { 44 | let filename = filename.to_str().ok_or_else(|| anyhow!("UTF8-error"))?; 45 | if filename == "pkg.toml" { 46 | Ok(PathComponent::PkgToml) 47 | } else { 48 | Ok(PathComponent::DirName(filename.to_string())) 49 | } 50 | } 51 | } 52 | } 53 | } 54 | 55 | impl PathComponent { 56 | /// Helper fn to get the directory name of this PathComponent if it is a PathComponent::DirName 57 | /// or None if it is not. 
58 | pub fn dir_name(&self) -> Option<&str> { 59 | match self { 60 | PathComponent::PkgToml => None, 61 | PathComponent::DirName(dn) => Some(dn), 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/config/endpoint_config.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use getset::{CopyGetters, Getters}; 12 | use serde::Deserialize; 13 | 14 | #[derive(Debug, Clone, Deserialize, Eq, PartialEq, Ord, PartialOrd, Hash)] 15 | #[serde(transparent)] 16 | pub struct EndpointName(String); 17 | 18 | impl From for EndpointName { 19 | fn from(s: String) -> Self { 20 | EndpointName(s) 21 | } 22 | } 23 | 24 | impl std::fmt::Display for EndpointName { 25 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { 26 | self.0.fmt(f) 27 | } 28 | } 29 | 30 | impl AsRef for EndpointName { 31 | fn as_ref(&self) -> &str { 32 | self.0.as_ref() 33 | } 34 | } 35 | 36 | impl EndpointName { 37 | pub fn len(&self) -> usize { 38 | self.0.len() 39 | } 40 | } 41 | 42 | /// Configuration of a single endpoint 43 | #[derive(Clone, Debug, Getters, CopyGetters, Deserialize)] 44 | #[serde(deny_unknown_fields)] 45 | pub struct Endpoint { 46 | /// The URI where the endpoint is reachable 47 | #[getset(get = "pub")] 48 | uri: String, 49 | 50 | /// The type of the endpoint (either "socket" or "http") 51 | #[getset(get = "pub")] 52 | endpoint_type: EndpointType, 53 | 54 | /// Maximum number of jobs which are allowed on this endpoint 55 | #[getset(get_copy = "pub")] 56 | maxjobs: usize, 57 | 58 | /// Sets the networking mode for the 
containers. 59 | /// Supported standard values are: "bridge", "host", "none", and "container:". Any 60 | /// other value is taken as a custom network's name to which this container should connect to. 61 | /// (See https://docs.docker.com/engine/api/v1.45/#tag/Image/operation/ImageBuild) 62 | #[getset(get = "pub")] 63 | network_mode: Option, 64 | 65 | /// Timeout in seconds for connecting to this endpoint 66 | #[getset(get = "pub")] 67 | timeout: Option, 68 | } 69 | 70 | /// The type of an endpoint 71 | #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] 72 | pub enum EndpointType { 73 | #[serde(rename = "socket")] 74 | Socket, 75 | #[serde(rename = "http")] 76 | Http, 77 | } 78 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # Documentation for this configuration file: 2 | # https://embarkstudios.github.io/cargo-deny/checks/cfg.html 3 | 4 | # GitHub link: https://github.com/EmbarkStudios/cargo-deny 5 | 6 | [licenses] 7 | # List of explicitly allowed licenses 8 | # See https://spdx.org/licenses/ for list of possible licenses 9 | # [possible values: any SPDX 3.7 short identifier (+ optional exception)]. 10 | allow = [ 11 | "Apache-2.0", 12 | "BSD-2-Clause", 13 | "BSD-3-Clause", 14 | "EPL-2.0", 15 | "MIT", 16 | "MPL-2.0", 17 | "Unicode-3.0", 18 | "Zlib" 19 | ] 20 | 21 | # The confidence threshold for detecting a license from license text. 22 | # The higher the value, the more closely the license text must be to the 23 | # canonical license text of a valid SPDX license file. 24 | # [possible values: any between 0.0 and 1.0]. 
25 | confidence-threshold = 0.8 26 | 27 | [bans] 28 | # Lint level for when multiple versions of the same crate are detected 29 | multiple-versions = "warn" 30 | 31 | # The graph highlighting used when creating dotgraphs for crates 32 | # with multiple versions 33 | # * lowest-version - The path to the lowest versioned duplicate is highlighted 34 | # * simplest-path - The path to the version with the fewest edges is highlighted 35 | # * all - Both lowest-version and simplest-path are used 36 | highlight = "all" 37 | 38 | # List of crates that are allowed. Use with care! 39 | allow = [ 40 | ] 41 | 42 | # List of crates to deny 43 | deny = [ 44 | # Each entry the name of a crate and a version range. If version is 45 | # not specified, all versions will be matched. 46 | { crate = "fuchsia-cprng" } 47 | ] 48 | 49 | # Certain crates/versions that will be skipped when doing duplicate detection. 50 | skip = [ 51 | ] 52 | 53 | # Similarly to `skip` allows you to skip certain crates during duplicate 54 | # detection. Unlike skip, it also includes the entire tree of transitive 55 | # dependencies starting at the specified crate, up to a certain depth, which is 56 | # by default infinite. 57 | skip-tree = [ 58 | ] 59 | 60 | 61 | [advisories] 62 | ignore = [ 63 | # Ignore an "INFO Unmaintained" advisory for the yaml-rust crate that the 64 | # "syntect" crate uses. 
This can be removed once 65 | # https://github.com/trishume/syntect/issues/537 is resolved (replace 66 | # yaml-rust with yaml-rust2): 67 | { id = "RUSTSEC-2024-0320", reason = "Only an informative advisory that the crate is unmaintained and the maintainer unreachable" }, 68 | ] 69 | -------------------------------------------------------------------------------- /src/db/models/package.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::ops::Deref; 12 | 13 | use anyhow::Error; 14 | use anyhow::Result; 15 | use diesel::prelude::*; 16 | 17 | use crate::schema::packages; 18 | use crate::schema::packages::*; 19 | 20 | #[derive(Debug, Identifiable, Queryable, Eq, PartialEq)] 21 | pub struct Package { 22 | pub id: i32, 23 | pub name: String, 24 | pub version: String, 25 | } 26 | 27 | #[derive(Insertable)] 28 | #[diesel(table_name = packages)] 29 | struct NewPackage<'a> { 30 | pub name: &'a str, 31 | pub version: &'a str, 32 | } 33 | 34 | impl Package { 35 | pub fn create_or_fetch( 36 | database_connection: &mut PgConnection, 37 | p: &crate::package::Package, 38 | ) -> Result { 39 | let new_package = NewPackage { 40 | name: p.name().deref(), 41 | version: p.version().deref(), 42 | }; 43 | 44 | database_connection.transaction::<_, Error, _>(|conn| { 45 | diesel::insert_into(packages::table) 46 | .values(&new_package) 47 | .on_conflict_do_nothing() 48 | .execute(conn)?; 49 | 50 | dsl::packages 51 | .filter({ 52 | let p_name = p.name().deref(); 53 | let p_vers = p.version().deref(); 54 | 55 | name.eq(p_name).and(version.eq(p_vers)) 56 | }) 57 | .first::(conn) 58 | .map_err(Error::from) 59 | 
}) 60 | } 61 | 62 | pub fn fetch_for_job( 63 | database_connection: &mut PgConnection, 64 | j: &crate::db::models::Job, 65 | ) -> Result> { 66 | Self::fetch_by_id(database_connection, j.package_id) 67 | } 68 | 69 | pub fn fetch_by_id( 70 | database_connection: &mut PgConnection, 71 | pid: i32, 72 | ) -> Result> { 73 | match dsl::packages 74 | .filter(id.eq(pid)) 75 | .first::(database_connection) 76 | { 77 | Err(diesel::result::Error::NotFound) => Ok(None), 78 | Err(e) => Err(Error::from(e)), 79 | Ok(p) => Ok(Some(p)), 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This file describes how you can contribute to butido. 4 | 5 | ## Checklist 6 | 7 | Those checks are performed/enforced via CI but you probably want to go over the 8 | checklist before submitting a PR to avoid unnecessary PR/CI iteration cycles: 9 | 10 | * [ ] All commits are signed off (`--signoff` - Why? See below) 11 | * [ ] No `!fixup` (etc.) commits 12 | * [ ] I ran `cargo check --all-targets` 13 | * [ ] I ran `cargo test` 14 | * [ ] I ran `cargo clippy --all-targets` 15 | 16 | ## Reporting issues / Questions 17 | 18 | If you have a question regarding butido, feel free to open an issue or write an 19 | email to the authors. 20 | 21 | 22 | ## Submitting patches 23 | 24 | Please make sure you `--signoff` your commits/patchmails. 25 | By that you agree to the 26 | [developer certificate](https://developercertificate.org/): 27 | 28 | > Developer Certificate of Origin 29 | > Version 1.1 30 | > 31 | > Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 32 | > 1 Letterman Drive 33 | > Suite D4700 34 | > San Francisco, CA, 94129 35 | > 36 | > Everyone is permitted to copy and distribute verbatim copies of this 37 | > license document, but changing it is not allowed. 
38 | > 39 | > 40 | > Developer's Certificate of Origin 1.1 41 | > 42 | > By making a contribution to this project, I certify that: 43 | > 44 | > (a) The contribution was created in whole or in part by me and I 45 | > have the right to submit it under the open source license 46 | > indicated in the file; or 47 | > 48 | > (b) The contribution is based upon previous work that, to the best 49 | > of my knowledge, is covered under an appropriate open source 50 | > license and I have the right under that license to submit that 51 | > work with modifications, whether created in whole or in part 52 | > by me, under the same open source license (unless I am 53 | > permitted to submit under a different license), as indicated 54 | > in the file; or 55 | > 56 | > (c) The contribution was provided directly to me by some other 57 | > person who certified (a), (b) or (c) and I have not modified 58 | > it. 59 | > 60 | > (d) I understand and agree that this project and the contribution 61 | > are public and that a record of the contribution (including all 62 | > personal information I submit with it, including my sign-off) is 63 | > maintained indefinitely and may be redistributed consistent with 64 | > this project or the open source license(s) involved. 65 | 66 | ## Bumping the MSRV 67 | 68 | Use `git grep -E '[0-9]+(\.[0-9]+){2}.+MSRV'` and update all matches. 
69 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "butido" 3 | version = "0.5.0" 4 | authors = [ 5 | # Only for the current/active maintainers (sorted alphabetically by the surname) 6 | # All other authors are listed in the "Authors" section of README.md 7 | "Nico Steinle ", # @ammernico 8 | "Michael Weiss ", # @primeos-work 9 | ] 10 | edition = "2024" 11 | rust-version = "1.88.0" # MSRV 12 | license = "EPL-2.0" 13 | 14 | description = "Linux package tool utilizing Docker, PostgreSQL, and TOML" 15 | homepage = "https://github.com/science-computing/butido" 16 | repository = "https://github.com/science-computing/butido" 17 | readme = "README.md" 18 | keywords = ["docker", "postgres", "linux", "packaging", "packages"] 19 | categories = ["development-tools"] 20 | 21 | [badges] 22 | maintenance = { status = "passively-maintained" } 23 | 24 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 25 | 26 | [dependencies] 27 | anyhow = "1" 28 | aquamarine = "0.6" 29 | ascii_table = { version = "5", features = ["color_codes", "wide_characters"] } 30 | bytesize = "2" 31 | chrono = "0.4" 32 | clap = { version = "4", features = ["cargo"] } 33 | clap_complete = "4" 34 | colored = "3" 35 | config = { version = "0.15", default-features = false, features = [ "toml" ] } 36 | csv = "1" 37 | dialoguer = "0.12" 38 | diesel = { version = "2", features = ["postgres", "chrono", "uuid", "serde_json", "r2d2"] } 39 | diesel_migrations = "2" 40 | filters = "0.4" 41 | futures = "0.3" 42 | getset = "0.1" 43 | git2 = "0.20" 44 | handlebars = { version = "6", features = ["no_logging"] } 45 | human-panic = "2" 46 | humantime = "2" 47 | indicatif = "0.18" 48 | indoc = "2" 49 | itertools = "0.14" 50 | lazy_static = "1" 51 | once_cell = "1" 52 | parse-display = "0.10" 53 | petgraph = "0.8" 54 | pom = "3" 
55 | ptree = { version = "0.5", default-features = false } 56 | rand = "0.9" 57 | rayon = "1" 58 | regex = "1" 59 | reqwest = { version = "0.12", features = [ "stream" ] } 60 | resiter = "0.5" 61 | rlimit = "0.10" 62 | rustversion = "1" 63 | semver = "1" 64 | serde = "1" 65 | serde_json = "1" 66 | sha1 = "0.10" 67 | sha2 = "0.10" 68 | shiplift = "0.7" 69 | syntect = "5" 70 | tar = "0.4" 71 | terminal_size = "0.4" 72 | tokio = { version = "1", features = ["macros", "fs", "process", "io-util", "signal", "time"] } 73 | tokio-util = "0.7" 74 | tokio-stream = "0.1" 75 | toml = "0.9" 76 | tracing = "0.1" 77 | tracing-chrome = "0.7" 78 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 79 | typed-builder = "0.23" 80 | unindent = "0.2" 81 | url = { version = "2", features = ["serde"] } 82 | uuid = { version = "1", features = ["serde", "v4"] } 83 | walkdir = "2" 84 | which = "8" 85 | xdg = "3" 86 | 87 | [build-dependencies] 88 | anyhow = "1" 89 | git_info = "0.1" 90 | vergen = { version = "8", features = ["build", "cargo", "git", "gitcl"] } 91 | -------------------------------------------------------------------------------- /src/db/models/submit.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Context; 12 | use anyhow::Error; 13 | use anyhow::Result; 14 | use chrono::NaiveDateTime; 15 | use diesel::prelude::*; 16 | 17 | use crate::db::models::GitHash; 18 | use crate::db::models::Image; 19 | use crate::db::models::Package; 20 | use crate::schema::submits; 21 | use crate::schema::submits::*; 22 | 23 | #[derive(Clone, Debug, Eq, PartialEq, Identifiable, Queryable, 
Associations)] 24 | #[diesel(belongs_to(Package, foreign_key = requested_package_id))] 25 | #[diesel(belongs_to(Image, foreign_key = requested_image_id))] 26 | #[diesel(table_name = submits)] 27 | pub struct Submit { 28 | pub id: i32, 29 | pub uuid: ::uuid::Uuid, 30 | pub submit_time: NaiveDateTime, 31 | pub requested_image_id: i32, 32 | pub requested_package_id: i32, 33 | pub repo_hash_id: i32, 34 | } 35 | 36 | #[derive(Insertable)] 37 | #[diesel(table_name = submits)] 38 | struct NewSubmit<'a> { 39 | pub uuid: &'a ::uuid::Uuid, 40 | pub submit_time: &'a NaiveDateTime, 41 | pub requested_image_id: i32, 42 | pub requested_package_id: i32, 43 | pub repo_hash_id: i32, 44 | } 45 | 46 | impl Submit { 47 | pub fn create( 48 | database_connection: &mut PgConnection, 49 | submit_datetime: &NaiveDateTime, 50 | submit_id: &::uuid::Uuid, 51 | requested_image: &Image, 52 | requested_package: &Package, 53 | repo_hash: &GitHash, 54 | ) -> Result { 55 | let new_submit = NewSubmit { 56 | uuid: submit_id, 57 | submit_time: submit_datetime, 58 | requested_image_id: requested_image.id, 59 | requested_package_id: requested_package.id, 60 | repo_hash_id: repo_hash.id, 61 | }; 62 | 63 | database_connection.transaction::<_, Error, _>(|conn| { 64 | diesel::insert_into(submits::table) 65 | .values(&new_submit) 66 | // required because if we re-use the staging store, we do not create a new UUID but re-use the old one 67 | .on_conflict_do_nothing() 68 | .execute(conn) 69 | .context("Inserting new submit into submits table")?; 70 | 71 | Self::with_id(conn, submit_id) 72 | }) 73 | } 74 | 75 | pub fn with_id( 76 | database_connection: &mut PgConnection, 77 | submit_id: &::uuid::Uuid, 78 | ) -> Result { 79 | dsl::submits 80 | .filter(submits::uuid.eq(submit_id)) 81 | .first::(database_connection) 82 | .context("Loading submit") 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/filestore/staging.rs: 
-------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::fmt::Debug; 12 | 13 | use anyhow::anyhow; 14 | use anyhow::Context; 15 | use anyhow::Result; 16 | use futures::stream::Stream; 17 | use indicatif::ProgressBar; 18 | use tracing::trace; 19 | 20 | use crate::filestore::path::ArtifactPath; 21 | use crate::filestore::path::StoreRoot; 22 | use crate::filestore::util::FileStoreImpl; 23 | 24 | pub struct StagingStore(pub(in crate::filestore) FileStoreImpl); 25 | 26 | impl Debug for StagingStore { 27 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> { 28 | write!(f, "StagingStore(root: {})", self.0.root_path().display()) 29 | } 30 | } 31 | 32 | impl StagingStore { 33 | pub fn load(root: StoreRoot, progress: &ProgressBar) -> Result { 34 | FileStoreImpl::load(root, progress).map(StagingStore) 35 | } 36 | 37 | /// Write the passed tar stream to the file store 38 | /// 39 | /// # Returns 40 | /// 41 | /// Returns a list of Artifacts that were written from the stream 42 | pub async fn write_files_from_tar_stream(&mut self, stream: S) -> Result> 43 | where 44 | S: Stream>>, 45 | { 46 | use futures::stream::TryStreamExt; 47 | 48 | let dest = self.0.root_path(); 49 | stream 50 | .try_concat() 51 | .await 52 | .and_then(|bytes| { 53 | trace!("Unpacking archive to {}", dest.display()); 54 | dest.unpack_archive_here(tar::Archive::new(&bytes[..])) 55 | .context("Unpacking TAR") 56 | }) 57 | .context("Concatenating the output bytestream")? 
58 | .into_iter() 59 | .inspect(|p| trace!("Trying to load into staging store: {}", p.display())) 60 | .filter_map(|path| { 61 | if self.0.root_path().is_dir(&path) { 62 | None 63 | } else { 64 | ArtifactPath::new(path.to_path_buf()) 65 | .inspect(|r| trace!("Loaded from path {} = {:?}", path.display(), r)) 66 | .with_context(|| anyhow!("Loading from path: {}", path.display())) 67 | .map(|ap| self.0.load_from_path(&ap).clone()) 68 | .map(Some) 69 | .transpose() 70 | } 71 | }) 72 | .collect() 73 | } 74 | 75 | pub fn root_path(&self) -> &StoreRoot { 76 | self.0.root_path() 77 | } 78 | 79 | pub fn get(&self, p: &ArtifactPath) -> Option<&ArtifactPath> { 80 | self.0.get(p) 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/db/models/artifact.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use crate::filestore::path::ArtifactPath; 12 | use std::path::PathBuf; 13 | 14 | use anyhow::anyhow; 15 | use anyhow::Context; 16 | use anyhow::Error; 17 | use anyhow::Result; 18 | use chrono::NaiveDateTime; 19 | use diesel::prelude::*; 20 | 21 | use crate::db::models::Job; 22 | use crate::db::models::Release; 23 | use crate::schema::artifacts; 24 | use crate::schema::artifacts::*; 25 | 26 | #[derive(Debug, Identifiable, Queryable, Associations)] 27 | #[diesel(belongs_to(Job))] 28 | pub struct Artifact { 29 | pub id: i32, 30 | pub path: String, 31 | pub job_id: i32, 32 | } 33 | 34 | #[derive(Insertable)] 35 | #[diesel(table_name = artifacts)] 36 | struct NewArtifact<'a> { 37 | pub path: &'a str, 38 | pub job_id: i32, 39 | } 40 | 41 | impl Artifact { 42 | 
pub fn path_buf(&self) -> PathBuf { 43 | PathBuf::from(&self.path) 44 | } 45 | 46 | pub fn released( 47 | self, 48 | database_connection: &mut PgConnection, 49 | release_date: &NaiveDateTime, 50 | release_store_name: &str, 51 | ) -> Result { 52 | let rs = crate::db::models::ReleaseStore::create(database_connection, release_store_name)?; 53 | crate::db::models::Release::create(database_connection, &self, release_date, &rs) 54 | } 55 | 56 | pub fn get_release(&self, database_connection: &mut PgConnection) -> Result> { 57 | use crate::schema; 58 | 59 | schema::artifacts::table 60 | .inner_join(schema::releases::table) 61 | .filter(schema::releases::artifact_id.eq(self.id)) 62 | .select(schema::releases::all_columns) 63 | .first::(database_connection) 64 | .optional() 65 | .map_err(Error::from) 66 | } 67 | 68 | pub fn create( 69 | database_connection: &mut PgConnection, 70 | art_path: &ArtifactPath, 71 | job: &Job, 72 | ) -> Result { 73 | let path_str = art_path 74 | .to_str() 75 | .ok_or_else(|| anyhow!("Path is not valid UTF-8: {}", art_path.display())) 76 | .context("Writing artifact to database")?; 77 | let new_art = NewArtifact { 78 | path: path_str, 79 | job_id: job.id, 80 | }; 81 | 82 | database_connection.transaction::<_, Error, _>(|conn| { 83 | diesel::insert_into(artifacts::table) 84 | .values(&new_art) 85 | .execute(conn)?; 86 | 87 | dsl::artifacts 88 | .filter(path.eq(path_str).and(job_id.eq(job.id))) 89 | .first::(conn) 90 | .map_err(Error::from) 91 | }) 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/schema.rs: -------------------------------------------------------------------------------- 1 | table! { 2 | artifacts (id) { 3 | id -> Int4, 4 | path -> Varchar, 5 | job_id -> Int4, 6 | } 7 | } 8 | 9 | table! { 10 | endpoints (id) { 11 | id -> Int4, 12 | name -> Varchar, 13 | } 14 | } 15 | 16 | table! 
{ 17 | envvars (id) { 18 | id -> Int4, 19 | name -> Varchar, 20 | value -> Varchar, 21 | } 22 | } 23 | 24 | table! { 25 | githashes (id) { 26 | id -> Int4, 27 | hash -> Varchar, 28 | } 29 | } 30 | 31 | table! { 32 | images (id) { 33 | id -> Int4, 34 | name -> Varchar, 35 | } 36 | } 37 | 38 | table! { 39 | job_envs (id) { 40 | id -> Int4, 41 | job_id -> Int4, 42 | env_id -> Int4, 43 | } 44 | } 45 | 46 | table! { 47 | jobs (id) { 48 | id -> Int4, 49 | submit_id -> Int4, 50 | endpoint_id -> Int4, 51 | package_id -> Int4, 52 | image_id -> Int4, 53 | container_hash -> Varchar, 54 | script_text -> Text, 55 | log_text -> Text, 56 | uuid -> Uuid, 57 | } 58 | } 59 | 60 | table! { 61 | packages (id) { 62 | id -> Int4, 63 | name -> Varchar, 64 | version -> Varchar, 65 | } 66 | } 67 | 68 | table! { 69 | release_stores (id) { 70 | id -> Int4, 71 | store_name -> Varchar, 72 | } 73 | } 74 | 75 | table! { 76 | releases (id) { 77 | id -> Int4, 78 | artifact_id -> Int4, 79 | release_date -> Timestamptz, 80 | release_store_id -> Int4, 81 | } 82 | } 83 | 84 | table! { 85 | submit_envs (id) { 86 | id -> Int4, 87 | submit_id -> Int4, 88 | env_id -> Int4, 89 | } 90 | } 91 | 92 | table! 
{ 93 | submits (id) { 94 | id -> Int4, 95 | uuid -> Uuid, 96 | submit_time -> Timestamptz, 97 | requested_image_id -> Int4, 98 | requested_package_id -> Int4, 99 | repo_hash_id -> Int4, 100 | } 101 | } 102 | 103 | joinable!(artifacts -> jobs (job_id)); 104 | joinable!(job_envs -> envvars (env_id)); 105 | joinable!(job_envs -> jobs (job_id)); 106 | joinable!(jobs -> endpoints (endpoint_id)); 107 | joinable!(jobs -> images (image_id)); 108 | joinable!(jobs -> packages (package_id)); 109 | joinable!(jobs -> submits (submit_id)); 110 | joinable!(releases -> artifacts (artifact_id)); 111 | joinable!(releases -> release_stores (release_store_id)); 112 | joinable!(submit_envs -> envvars (env_id)); 113 | joinable!(submit_envs -> submits (submit_id)); 114 | joinable!(submits -> githashes (repo_hash_id)); 115 | joinable!(submits -> images (requested_image_id)); 116 | joinable!(submits -> packages (requested_package_id)); 117 | 118 | allow_tables_to_appear_in_same_query!( 119 | artifacts, 120 | endpoints, 121 | envvars, 122 | githashes, 123 | images, 124 | job_envs, 125 | jobs, 126 | packages, 127 | release_stores, 128 | releases, 129 | submit_envs, 130 | submits, 131 | ); 132 | -------------------------------------------------------------------------------- /src/commands/what_depends.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Implementation of the 'what_depends' subcommand 12 | 13 | use std::io::Write; 14 | 15 | use anyhow::Result; 16 | use clap::ArgMatches; 17 | use futures::stream::StreamExt; 18 | use futures::stream::TryStreamExt; 19 | use resiter::Filter; 20 | use resiter::Map; 21 | use tracing::trace; 22 | 23 | use crate::commands::util::getbool; 24 | use crate::config::*; 25 | use crate::package::PackageName; 26 | use crate::repository::Repository; 27 | use crate::ui::*; 28 | 29 | /// Implementation of the "what_depends" subcommand 30 | pub async fn what_depends( 31 | matches: &ArgMatches, 32 | config: &Configuration, 33 | repo: Repository, 34 | ) -> Result<()> { 35 | use filters::failable::filter::FailableFilter; 36 | 37 | let print_runtime_deps = getbool( 38 | matches, 39 | "dependency_type", 40 | crate::cli::IDENT_DEPENDENCY_TYPE_RUNTIME, 41 | ); 42 | let print_build_deps = getbool( 43 | matches, 44 | "dependency_type", 45 | crate::cli::IDENT_DEPENDENCY_TYPE_BUILD, 46 | ); 47 | 48 | let package_filter = { 49 | let name = matches 50 | .get_one::("package_name") 51 | .map(|s| s.to_owned()) 52 | .map(PackageName::from) 53 | .unwrap(); 54 | 55 | crate::util::filters::build_package_filter_by_dependency_name( 56 | &name, 57 | print_build_deps, 58 | print_runtime_deps, 59 | ) 60 | }; 61 | 62 | let hb = crate::ui::handlebars_for_package_printing(config.package_print_format())?; 63 | let stdout = std::io::stdout(); 64 | let mut outlock = stdout.lock(); 65 | 66 | let flags = crate::ui::PackagePrintFlags { 67 | print_all: false, 68 | print_runtime_deps, 69 | print_build_deps, 70 | print_sources: false, 71 | print_dependencies: true, 72 | print_patches: false, 73 | print_env: false, 74 | print_flags: false, 75 | print_allowed_images: false, 76 | print_denied_images: false, 77 | print_phases: false, 78 | print_script: false, 79 | script_line_numbers: false, 80 | script_highlighting: false, 81 | }; 82 | 83 | let mut i = 0; 84 | let iter = repo 85 | .packages() 86 | .map(|package| 
package_filter.filter(package).map(|b| (b, package))) 87 | .filter_ok(|(b, _)| *b) 88 | .map_ok(|tpl| tpl.1) 89 | .inspect(|pkg| trace!("Found package: {:?}", pkg)) 90 | .map_ok(|p| { 91 | // poor mans enumerate_ok() 92 | i += 1; 93 | p.prepare_print(config, &flags, &hb, i) 94 | }); 95 | 96 | tokio_stream::iter(iter) 97 | .map(|pp| pp.and_then(|p| p.into_displayable())) 98 | .try_for_each(|p| { 99 | let r = writeln!(&mut outlock, "{p}").map_err(anyhow::Error::from); 100 | futures::future::ready(r) 101 | }) 102 | .await 103 | } 104 | -------------------------------------------------------------------------------- /examples/packages/README.md: -------------------------------------------------------------------------------- 1 | # Example package tree 2 | 3 | This subtree contains a large number of "packages", whereas each "package" is 4 | nothing more than a number piped to a file. 5 | This can be used to test out butido and how it works. 6 | 7 | 8 | ## The packages 9 | 10 | Each package has a single-letter name ('a'..'z') and a version number that 11 | increases for each package, so 'a' has version '1', and 'b' has version '2' and 12 | so on. 
13 | 14 | The hierarchy of packages is described as follows: 15 | 16 | A 17 | + 18 | | 19 | +---------------+----------------+ 20 | | | 21 | v v 22 | B C 23 | + + 24 | | +--------------------+ 25 | +-------------+ | | | 26 | v v v v v v 27 | D E F+--------+ G H I 28 | + + + | + + 29 | | | | | | +---+---+ 30 | v v v +---+-+-+---+ v v 31 | J K L | | | | Q R 32 | + + + v v v v + 33 | +--+---+ | | M N O P | 34 | v v | v + + v 35 | S T +----->U | | V 36 | + | | + 37 | | | | | 38 | | | | v 39 | | | | W 40 | | | | + 41 | | | | | 42 | | | | v 43 | +-----------------+---+-------> X 44 | + 45 | | 46 | v 47 | Y 48 | + 49 | | 50 | v 51 | Z 52 | 53 | 54 | The important features here: 55 | 56 | * A simple chain of dependencies: Q -> V -> W -> X -> Y -> Z 57 | * DAG-Features (non-treeish dependencies): 58 | * F -> M 59 | * K -> U 60 | * O -> X 61 | * P -> X 62 | 63 | 64 | The order of builds should be opposite of the arrows, with builds for Z, R, H, 65 | N, M, T and S starting right away. 66 | 67 | 68 | Multiple versions of one package are not yet considered in this setup. 69 | 70 | 71 | # The packaging 72 | 73 | The packaging script does the following: 74 | 75 | * In the "sourcecheck" phase, it checks whether the input is present (that would 76 | be the .tar.gz file of the package to compile) 77 | * In the "depcheck" phase, it checks whether all dependencies are present. This 78 | is done to demonstrate what could be done, not because it makes a great deal 79 | of sense 80 | * In the "build" phase, it "builds" a package, by piping the version number of 81 | the package itself to the output file. 
/// A `jobs` table row: one package build that was (or is being) executed in a
/// container on some Docker endpoint as part of a submit.
#[derive(Debug, Eq, PartialEq, Identifiable, Queryable, Associations)]
#[diesel(belongs_to(Submit))]
#[diesel(belongs_to(Endpoint))]
#[diesel(belongs_to(Package))]
#[diesel(belongs_to(Image))]
#[diesel(table_name = jobs)]
pub struct Job {
    pub id: i32,
    pub submit_id: i32,
    pub endpoint_id: i32,
    pub package_id: i32,
    pub image_id: i32,
    // Hash of the container the job ran in
    pub container_hash: String,
    // The full script that was executed inside the container
    pub script_text: String,
    // The captured build log
    pub log_text: String,
    pub uuid: ::uuid::Uuid,
}

/// Insertable counterpart of [`Job`]: no `id` (assigned by the database) and
/// borrows where possible.
#[derive(Debug, Insertable)]
#[diesel(table_name = jobs)]
struct NewJob<'a> {
    pub submit_id: i32,
    pub endpoint_id: i32,
    pub package_id: i32,
    pub image_id: i32,
    pub container_hash: &'a str,
    pub script_text: String,
    pub log_text: String,
    pub uuid: &'a ::uuid::Uuid,
}

impl Job {
    /// Insert a new job row and return it.
    ///
    /// The insert uses `ON CONFLICT DO NOTHING`, so if a row for `job_uuid`
    /// already exists the insert is a no-op and the existing row is fetched
    /// and returned instead (insert + lookup run in one transaction).
    #[allow(clippy::too_many_arguments)]
    pub fn create(
        database_connection: &mut PgConnection,
        job_uuid: &::uuid::Uuid,
        submit: &Submit,
        endpoint: &Endpoint,
        package: &Package,
        image: &Image,
        container: &ContainerHash,
        script: &Script,
        log: &str,
    ) -> Result<Job> {
        let new_job = NewJob {
            uuid: job_uuid,
            submit_id: submit.id,
            endpoint_id: endpoint.id,
            package_id: package.id,
            image_id: image.id,
            container_hash: container.as_ref(),
            // NUL bytes are stripped before insertion (PostgreSQL text
            // columns cannot contain them).
            script_text: script.as_ref().replace('\0', ""),
            log_text: log.replace('\0', ""),
        };

        trace!("Creating Job in database: {:?}", new_job);
        let query = diesel::insert_into(jobs::table)
            .values(&new_job)
            .on_conflict_do_nothing();

        trace!(
            "Query = {}",
            diesel::debug_query::<diesel::pg::Pg, _>(&query)
        );

        database_connection.transaction::<_, Error, _>(|conn| {
            query.execute(conn).context("Creating job in database")?;

            // Fetch the row by uuid: either the one just inserted, or the
            // pre-existing one if the insert hit a conflict.
            dsl::jobs
                .filter(uuid.eq(job_uuid))
                .first::<Job>(conn)
                .with_context(|| format!("Finding created job in database: {job_uuid}"))
        })
    }

    /// Load all environment variables attached to this job (joined through
    /// the `job_envs` association table).
    pub fn env(
        &self,
        database_connection: &mut PgConnection,
    ) -> Result<Vec<crate::db::models::EnvVar>> {
        use crate::schema;

        schema::job_envs::table
            .inner_join(schema::envvars::table)
            .filter(schema::job_envs::job_id.eq(self.id))
            .select(schema::envvars::all_columns)
            .load::<crate::db::models::EnvVar>(database_connection)
            .map_err(Error::from)
    }
}
Implementation of the 'dependencies-of' subcommand 12 | 13 | use std::io::Write; 14 | 15 | use anyhow::Context; 16 | use anyhow::Result; 17 | use clap::ArgMatches; 18 | use futures::stream::StreamExt; 19 | use futures::stream::TryStreamExt; 20 | use tracing::trace; 21 | 22 | use crate::commands::util::getbool; 23 | use crate::config::*; 24 | use crate::package::PackageName; 25 | use crate::package::PackageVersionConstraint; 26 | use crate::repository::Repository; 27 | use crate::ui::*; 28 | 29 | /// Implementation of the "dependencies_of" subcommand 30 | pub async fn dependencies_of( 31 | matches: &ArgMatches, 32 | config: &Configuration, 33 | repo: Repository, 34 | ) -> Result<()> { 35 | use filters::filter::Filter; 36 | 37 | let package_filter = { 38 | let name = matches 39 | .get_one::("package_name") 40 | .map(|s| s.to_owned()) 41 | .map(PackageName::from) 42 | .unwrap(); 43 | trace!("Checking for package with name = {}", name); 44 | let version_constraint = matches 45 | .get_one::("package_version_constraint") 46 | .map(|s| s.to_owned()) 47 | .map(PackageVersionConstraint::try_from) 48 | .transpose() 49 | .context("Parsing package version constraint")?; 50 | trace!( 51 | "Checking for package with version constraint = {:?}", 52 | version_constraint 53 | ); 54 | 55 | crate::util::filters::build_package_filter_by_name(name).and( 56 | crate::util::filters::build_package_filter_by_version_constraint(version_constraint), 57 | ) 58 | }; 59 | 60 | let format = config.package_print_format(); 61 | let hb = crate::ui::handlebars_for_package_printing(format)?; 62 | let stdout = std::io::stdout(); 63 | let mut outlock = stdout.lock(); 64 | 65 | let print_runtime_deps = getbool( 66 | matches, 67 | "dependency_type", 68 | crate::cli::IDENT_DEPENDENCY_TYPE_RUNTIME, 69 | ); 70 | let print_build_deps = getbool( 71 | matches, 72 | "dependency_type", 73 | crate::cli::IDENT_DEPENDENCY_TYPE_BUILD, 74 | ); 75 | 76 | trace!( 77 | "Printing packages with format = '{}', runtime: {}, 
build: {}", 78 | format, 79 | print_runtime_deps, 80 | print_build_deps 81 | ); 82 | 83 | let flags = crate::ui::PackagePrintFlags { 84 | print_all: false, 85 | print_runtime_deps, 86 | print_build_deps, 87 | print_sources: false, 88 | print_dependencies: true, 89 | print_patches: false, 90 | print_env: false, 91 | print_flags: false, 92 | print_allowed_images: false, 93 | print_denied_images: false, 94 | print_phases: false, 95 | print_script: false, 96 | script_line_numbers: false, 97 | script_highlighting: false, 98 | }; 99 | 100 | let iter = repo 101 | .packages() 102 | .filter(|package| package_filter.filter(package)) 103 | .inspect(|pkg| trace!("Found package: {:?}", pkg)) 104 | .enumerate() 105 | .map(|(i, p)| p.prepare_print(config, &flags, &hb, i)); 106 | 107 | tokio_stream::iter(iter) 108 | .map(|pp| pp.into_displayable()) 109 | .try_for_each(|p| { 110 | let r = writeln!(&mut outlock, "{p}").map_err(anyhow::Error::from); 111 | futures::future::ready(r) 112 | }) 113 | .await 114 | } 115 | -------------------------------------------------------------------------------- /src/commands/find_pkg.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Implementation of the 'find-pkg' subcommand 12 | 13 | use anyhow::Context; 14 | use anyhow::Result; 15 | use clap::ArgMatches; 16 | use futures::stream::StreamExt; 17 | use futures::stream::TryStreamExt; 18 | use tracing::trace; 19 | 20 | use crate::config::Configuration; 21 | use crate::package::PackageVersionConstraint; 22 | use crate::repository::Repository; 23 | use crate::ui::*; 24 | 25 | /// Implementation of the "find_pkg" subcommand 26 | pub async fn find_pkg( 27 | matches: &ArgMatches, 28 | config: &Configuration, 29 | repo: Repository, 30 | ) -> Result<()> { 31 | use std::io::Write; 32 | 33 | let package_name_regex = crate::commands::util::mk_package_name_regex({ 34 | matches.get_one::("package_name_regex").unwrap() // safe by clap 35 | })?; 36 | 37 | let package_version_constraint = matches 38 | .get_one::("package_version_constraint") 39 | .map(|s| s.to_owned()) 40 | .map(PackageVersionConstraint::try_from) 41 | .transpose() 42 | .context("Parsing package version constraint")?; 43 | 44 | let iter = repo 45 | .packages() 46 | .filter(|p| package_name_regex.captures(p.name()).is_some()) 47 | .filter(|p| { 48 | package_version_constraint 49 | .as_ref() 50 | .map(|v| v.matches(p.version())) 51 | .unwrap_or(true) 52 | }) 53 | .inspect(|pkg| trace!("Found package: {:?}", pkg)); 54 | 55 | let out = std::io::stdout(); 56 | let mut outlock = out.lock(); 57 | if matches.get_flag("terse") { 58 | for p in iter { 59 | writeln!(outlock, "{} {}", p.name(), p.version())?; 60 | } 61 | Ok(()) 62 | } else { 63 | let flags = crate::ui::PackagePrintFlags { 64 | print_all: matches.get_flag("show_all"), 65 | print_runtime_deps: crate::commands::util::getbool( 66 | matches, 67 | "dependency_type", 68 | crate::cli::IDENT_DEPENDENCY_TYPE_RUNTIME, 69 | ), 70 | print_build_deps: crate::commands::util::getbool( 71 | matches, 72 | "dependency_type", 73 | crate::cli::IDENT_DEPENDENCY_TYPE_BUILD, 74 | ), 75 | print_sources: matches.get_flag("show_sources"), 76 | print_dependencies: 
matches.get_flag("show_dependencies"), 77 | print_patches: matches.get_flag("show_patches"), 78 | print_env: matches.get_flag("show_env"), 79 | print_flags: matches.get_flag("show_flags"), 80 | print_allowed_images: matches.get_flag("show_allowed_images"), 81 | print_denied_images: matches.get_flag("show_denied_images"), 82 | print_phases: matches.get_flag("show_phases"), 83 | print_script: matches.get_flag("show_script"), 84 | script_line_numbers: !matches.get_flag("no_script_line_numbers"), 85 | script_highlighting: !matches.get_flag("no_script_highlight"), 86 | }; 87 | 88 | let format = config.package_print_format(); 89 | let hb = crate::ui::handlebars_for_package_printing(format)?; 90 | 91 | tokio_stream::iter({ 92 | iter.enumerate() 93 | .map(|(i, p)| p.prepare_print(config, &flags, &hb, i)) 94 | }) 95 | .map(|pp| pp.into_displayable()) 96 | .try_for_each(|p| { 97 | let r = writeln!(&mut outlock, "{p}").map_err(anyhow::Error::from); 98 | futures::future::ready(r) 99 | }) 100 | .await 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/db/connection.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::Error; 12 | use anyhow::Result; 13 | use clap::ArgMatches; 14 | use diesel::prelude::*; 15 | use diesel::r2d2::ConnectionManager; 16 | use diesel::r2d2::Pool; 17 | use getset::Getters; 18 | use tracing::debug; 19 | 20 | use crate::config::Configuration; 21 | 22 | #[derive(Getters)] 23 | pub struct DbConnectionConfig<'a> { 24 | #[getset(get = "pub")] 25 | database_host: &'a str, 26 | 27 | #[getset(get = "pub")] 28 | 
// Debug-format the configuration as a postgres connection URI with the
// password replaced by the literal `PASSWORD`, so the config can be logged
// without leaking credentials.
impl std::fmt::Debug for DbConnectionConfig<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(
            f,
            "postgres://{user}:PASSWORD@{host}:{port}/{name}?connect_timeout={timeout}",
            host = self.database_host,
            port = self.database_port,
            user = self.database_user,
            name = self.database_name,
            timeout = self.database_connection_timeout
        )
    }
}

impl<'a> DbConnectionConfig<'a> {
    /// Assemble the connection configuration: CLI arguments take precedence
    /// over values from the configuration file.
    pub fn parse(config: &'a Configuration, cli: &'a ArgMatches) -> Result<DbConnectionConfig<'a>> {
        Ok(DbConnectionConfig {
            database_host: cli
                .get_one::<String>("database_host")
                .unwrap_or_else(|| config.database_host()),
            database_port: {
                *cli.get_one::<u16>("database_port")
                    .unwrap_or_else(|| config.database_port())
            },
            database_user: cli
                .get_one::<String>("database_user")
                .unwrap_or_else(|| config.database_user()),
            database_password: cli
                .get_one::<String>("database_password")
                .unwrap_or_else(|| config.database_password()),
            database_name: cli
                .get_one::<String>("database_name")
                .unwrap_or_else(|| config.database_name()),
            database_connection_timeout: {
                *cli.get_one::<u16>("database_connection_timeout")
                    .unwrap_or_else(|| config.database_connection_timeout())
            },
        })
    }

    // Build the full connection URI. Unlike the Debug impl, this contains the
    // plaintext password — never log the result of this function.
    fn get_database_uri(self) -> String {
        format!(
            "postgres://{user}:{password}@{host}:{port}/{name}?connect_timeout={timeout}",
            host = self.database_host,
            port = self.database_port,
            user = self.database_user,
            password = self.database_password,
            name = self.database_name,
            timeout = self.database_connection_timeout,
        )
    }

    /// Open a single database connection from this configuration.
    pub fn establish_connection(self) -> Result<PgConnection> {
        debug!("Trying to connect to database: {:?}", self);
        PgConnection::establish(&self.get_database_uri()).map_err(Error::from)
    }

    /// Create an r2d2 connection pool (with at least one idle connection)
    /// from this configuration.
    pub fn establish_pool(self) -> Result<Pool<ConnectionManager<PgConnection>>> {
        debug!(
            "Trying to create a connection pool for database: {:?}",
            self
        );
        let manager = ConnectionManager::<PgConnection>::new(self.get_database_uri());
        Pool::builder()
            .min_idle(Some(1))
            .build(manager)
            .map_err(Error::from)
    }
}
/// The default progress bar format
///
/// Passed verbatim to `indicatif` as the progress bar template (not
/// interpreted by Rust's own formatting machinery).
// Ignore a false positive Clippy warning (we pass this format string to
// `indicatif::ProgressBars` as `bar_template` instead of evaluating it here):
#[rustversion::attr(since(1.85), allow(clippy::literal_string_with_formatting_args))]
pub fn default_progress_format() -> String {
    String::from("{elapsed_precise} {percent:>3}% {bar:5.cyan/blue} | {msg}")
}

/// The default format that is used to print one package
///
/// A handlebars template: `p` is the package being rendered, `i` its index,
/// and the `print_*` variables mirror the fields of
/// `crate::ui::PackagePrintFlags`.
pub fn default_package_print_format() -> String {
    String::from(indoc::indoc!(
        r#"
        {{i}} - {{p.name}} : {{p.version}}
        {{~ #if print_any}}

        ==================================

        {{#if print_sources}}
        Sources:
        {{#each p.sources}}
        {{@key}} = {{this.url}} - {{this.hash.hash}} ({{this.hash.type}})
        {{/each}}
        {{/if~}}

        {{#if print_dependencies}}
        Dependencies:
        {{#if print_build_deps ~}}
        {{#each p.dependencies.build}}
        {{this}} (build)
        {{/each}}
        {{/if}}
        {{#if print_runtime_deps ~}}
        {{#each p.dependencies.runtime}}
        {{this}} (runtime)
        {{/each}}
        {{/if}}
        {{/if~}}

        {{#if print_patches}}
        Patches:
        {{#each p.patches}}
        {{this}},
        {{/each~}}
        {{/if~}}

        {{#if print_env}}
        Environment:
        {{#each p.environment}}
        {{@key}}={{this}}
        {{/each~}}
        {{/if~}}

        {{~#if print_flags}}
        Flags:
        {{#each p.flags}}
        {{this}}
        {{/each}}
        {{/if~}}

        {{~#if print_allowed_images}}
        Only supported on:
        {{#each p.allowed_images}}
        {{this}}
        {{/each}}
        {{/if~}}

        {{~#if print_denied_images}}
        Denied on:
        {{#each p.denied_images}}
        {{this}}
        {{/each}}
        {{/if~}}

        {{#if print_phases}}
        Phases:
        {{#each p.phases}}
        {{@key}}
        {{/each}}
        {{/if~}}

        {{~#if print_script}}
        {{script}}
        {{/if~}}
        {{~ /if ~}}
        "#
    ))
}

/// The default value for whether strict script interpolation should be used
pub fn default_strict_script_interpolation() -> bool {
    true
}

/// The default value for the shebang
///
/// Prepended to the compiled packaging script.
pub fn default_script_shebang() -> String {
    String::from("#!/bin/bash")
}

/// The default value for the number of log lines that should be printed if a build fails
pub fn default_build_error_lines() -> usize {
    10
}

/// The default value for the database connection timeout (in seconds)
pub fn default_database_connection_timeout() -> u16 {
    30
}

/// The default value for the number of results/rows that should be returned for DB queries that
/// list things (LIMIT)
pub fn default_database_query_limit() -> usize {
    10
}
These can be either printed via `echo` or via the script helpers provided for
each kind of output. Note that the script helper is equivalent to writing the
`echo` output yourself and is just added for convenience.

Helpers for other scripting languages besides bash do not exist (yet?).


### State

Your script can finish with two states.
These states _MUST_ be set from your script, otherwise butido will not be able
to know whether your build was successful or not.

* Printed:
  `echo "#BUTIDO:STATE:OK"` for a successful exit
  `echo '#BUTIDO:STATE:ERR:"errormessage"'` for an erroneous exit
* Helper:
  `{{state "OK"}}` for a successful exit
  `{{state "ERR" "message"}}` for an erroneous exit


### Phases

The configuration file features a field where you can define "phases" of the
packaging script. These phases are nothing more than commented sections of your
script. Butido concatenates all configured phases to one huge script.
These phases can help you organize what is happening, for example you can have
a phase for unpacking the sources, one for preparing the build, one for building
and finally one for packaging.
The number of phases is not restricted, but at least one must exist.

Phases can be announced to the CLI frontend via printing

* Bash: `echo '#BUTIDO:PHASE:<phasename>'`
* Helper: `{{phase "<phasename>"}}` using the helper provided by butido.

Only the latest phase will be shown to the user.
The phase name will also be shown to the user if the packaging script fails, so
they can find the location of the error faster.


### Progress

The script can also print progress information to the CLI frontend. This
progress information is nothing more than a number (`0..100`) that is used to
update the progress bar.
It can be updated using

* Bash: `echo '#BUTIDO:PROGRESS:<number>'`
* Helper: `{{progress <number>}}`

This is purely a quality-of-life feature that gives the caller of butido
visual feedback about the progress of a packaging script.
For the packaging process itself it is not required.


(Butido might get functionality to infer the progress information based on
earlier builds of the same package using heuristics. This might or might not
deprecate this feature.)


### Other helpers

The (handlebars) templating engine we use to provide helpers for the package
script building provides some basic helpers, e.g.:

* if-else
* each
* boolean helpers for if conditions

For a full list see
[the handlebars documentation](https://docs.rs/handlebars/3.5.1/handlebars/#built-in-helpers).
The "logging" feature of handlebars is _not_ enabled, thus, the logging helpers
are not included.

butido provides some more helpers:

* `join` for joining several strings into one string:
  `{{join "foo" "bar"}}` -> `foobar`
  Arguments can also be variables.

* `joinwith` for joining several strings into one string, separated by a given
  separator:
  `{{joinwith ", " "foo" "bar"}}` -> `foo, bar`
  Arguments can also be variables.
107 | 108 | -------------------------------------------------------------------------------- /.github/workflows/cargo.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request, merge_group] 2 | 3 | name: Cargo 4 | 5 | jobs: 6 | fmt: 7 | name: Fmt 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - name: Checkout sources 12 | uses: actions/checkout@v6 13 | 14 | - name: Install toolchain 15 | uses: dtolnay/rust-toolchain@v1 16 | with: 17 | toolchain: 1.88.0 # MSRV 18 | components: rustfmt 19 | 20 | - name: Run cargo fmt 21 | run: cargo fmt --check 22 | 23 | check: 24 | name: Check 25 | runs-on: ubuntu-latest 26 | strategy: 27 | matrix: 28 | rust: 29 | - 1.88.0 # MSRV 30 | - stable 31 | - beta 32 | 33 | steps: 34 | - name: Checkout sources 35 | uses: actions/checkout@v6 36 | 37 | - name: Install toolchain 38 | uses: dtolnay/rust-toolchain@v1 39 | with: 40 | toolchain: ${{ matrix.rust }} 41 | 42 | - uses: swatinem/rust-cache@v2 43 | with: 44 | shared-key: "ci" 45 | 46 | - name: Run cargo check 47 | run: cargo check --all-targets 48 | 49 | test: 50 | needs: [check] 51 | name: Test 52 | runs-on: ubuntu-latest 53 | strategy: 54 | matrix: 55 | rust: 56 | - 1.88.0 # MSRV 57 | - stable 58 | - beta 59 | steps: 60 | - name: Checkout sources 61 | uses: actions/checkout@v6 62 | 63 | - name: Install toolchain 64 | uses: dtolnay/rust-toolchain@v1 65 | with: 66 | toolchain: ${{ matrix.rust }} 67 | 68 | - uses: swatinem/rust-cache@v2 69 | with: 70 | shared-key: "ci" 71 | 72 | - name: Run cargo test 73 | run: cargo test 74 | 75 | deny: 76 | name: Deny 77 | runs-on: ubuntu-latest 78 | strategy: 79 | matrix: 80 | checks: 81 | - advisories 82 | - bans licenses sources 83 | 84 | # Prevent sudden announcement of a new advisory from failing CI: 85 | continue-on-error: ${{ matrix.checks == 'advisories' }} 86 | 87 | steps: 88 | - uses: actions/checkout@v6 89 | # https://github.com/EmbarkStudios/cargo-deny-action: 90 | - uses: 
EmbarkStudios/cargo-deny-action@v2 91 | with: 92 | command: check ${{ matrix.checks }} 93 | 94 | clippy: 95 | needs: [check] 96 | name: Clippy 97 | runs-on: ubuntu-latest 98 | continue-on-error: ${{ matrix.optional }} 99 | strategy: 100 | fail-fast: false 101 | matrix: 102 | include: 103 | - rust: 1.88.0 # MSRV 104 | optional: false 105 | - rust: beta 106 | optional: true 107 | steps: 108 | - uses: actions/checkout@v6 109 | - uses: dtolnay/rust-toolchain@v1 110 | with: 111 | toolchain: ${{ matrix.rust }} 112 | components: clippy 113 | - uses: swatinem/rust-cache@v2 114 | with: 115 | shared-key: "ci" 116 | - name: cargo clippy 117 | run: cargo clippy --all-targets -- -D warnings 118 | 119 | # This "accumulation" job is used as the required CI check for PRs. 120 | # We could require multiple jobs but the MSRV is subject to change and makes 121 | # it into the job names when using the matrix strategy (e.g., "Check (1.65.0)"). 122 | # This approach seems to be the easiest solution for now. 123 | # Ideally, it would be enough to set "needs" accordingly but GitHub will 124 | # accept the "skipped" status as a "success" when merging (which is very 125 | # bad!). Therefore, we need to always run this job and have to manually check 126 | # if all required jobs did return "success". 127 | result: 128 | name: Result 129 | if: ${{ always() }} 130 | runs-on: ubuntu-latest 131 | needs: 132 | - fmt 133 | - check 134 | - test 135 | - deny 136 | - clippy 137 | steps: 138 | - run: | 139 | echo "Error: A required CI check failed!" 
>&2 140 | exit 1 141 | if: >- 142 | ${{ 143 | contains(needs.*.result, 'failure') || 144 | contains(needs.*.result, 'cancelled') || 145 | contains(needs.*.result, 'skipped') 146 | }} 147 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BUTIDO 2 | 3 | "butido" could stand for "but i do", "BUild Things In DOcker" or 4 | "Better Universal Task Instrumentation Docker Observator". 5 | 6 | Anyways, it is a tool for building packages for Linux distributions in Docker 7 | and it does not make assumptions about the build procedure itself (and thus can 8 | build .rpm, .deb, or any other package format your scripts can build). 9 | 10 | 11 | ## Functionality 12 | 13 | Packages are defined in TOML and in hierarchies 14 | (see [config-rs](https://docs.rs/config/)). 15 | See the [examples](./examples) for how to define packages. 16 | 17 | The "business-logic" of packages are shell scripts which exist in predefined 18 | "phases". 19 | These scripts are compiled into one large script (per package) which is then 20 | run to build the source into a package. 21 | 22 | The package definition(s) can hold meta-information and (of course) information 23 | about a packages dependencies. Both dependencies and meta-information is made 24 | available in a build. 25 | 26 | Everything that is computed before, during or after a build or submit is written 27 | to a postgres database, including build logs. 28 | This database can be queried for packages, build information, logs and other 29 | data. 30 | 31 | Successfully built packages are collected in a "staging" store on FS. A staging 32 | store is created per submit. 33 | The results can be taken from this "staging" store and be released into a 34 | "release" store. 
35 | 36 | 37 | ## Requirements 38 | 39 | Building butido is easy, assuming you have a Rust installation: 40 | 41 | ```bash 42 | cargo build --release # (remove --release for a debug build) 43 | ``` 44 | 45 | Butido is built and tested with Rust 1.88.0 as MSRV. 46 | 47 | 48 | ### (Development) Setup 49 | 50 | To set up a development infrastructure or a production infrastructure (using the 51 | examples from the `./examples/packages` directory): 52 | 53 | ```bash 54 | # pull down necessary Docker images 55 | docker pull debian:bullseye 56 | docker pull postgres:12 57 | 58 | # setup the database in a postgres container 59 | PG_USER=pgdev \ 60 | PG_PW=password \ 61 | PG_DB=butido \ 62 | PG_CONTAINER_NAME=butido-db \ 63 | bash scripts/dev-pg-container.sh 64 | 65 | # copy the examples to /tmp 66 | cd examples/packages 67 | make 68 | 69 | # Finish the database setup 70 | cd /tmp/butido-test-repo 71 | /path/to/butido db setup 72 | 73 | # Start building 74 | /path/to/butido build a --image debian:bullseye 75 | ``` 76 | 77 | 78 | ### Glossary 79 | 80 | | Word | Explanation | 81 | |-------------|------------------------------------------------------------------------------------------------------------------| 82 | | build / job | The procedure of transforming a set of sources to a package (or, technically, even to multiple packages) | 83 | | dependency | A "package" that is required during the buildtime or during the runtime of another "package" | 84 | | endpoint | A Docker API endpoint butido can talk to | 85 | | jobset | A list of jobs that can be run in any order or in parallel | 86 | | output | The results of a butido build job | 87 | | package | A single (archive) file OR the definition of a job | 88 | | script | The script that is run inside a container. Basically the "->" in "source -> package". 
| 89 | | source | A file that contains a source code archive | 90 | | submit | A call to butido for building a single package, which can result in multiple packages (dependencies) being built | 91 | | tree | The tree structure that is computed before a packages is built to find out all (transitive) dependencies | 92 | 93 | 94 | # Authors 95 | 96 | 97 | - Original author: Matthias Beyer @matthiasbeyer 98 | - Active maintainers: See `authors` in Cargo.toml 99 | - Passive maintainers 100 | - Erdmut Pfeifer @ErdmutPfeifer 101 | - Christoph Prokop @christophprokop 102 | 103 | 104 | # License 105 | 106 | butido was developed for science + computing AG (an Atos company). 107 | 108 | License: EPL-2.0 109 | -------------------------------------------------------------------------------- /src/source/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use std::path::PathBuf; 12 | 13 | use anyhow::anyhow; 14 | use anyhow::Context; 15 | use anyhow::Result; 16 | use tracing::trace; 17 | use url::Url; 18 | 19 | use crate::package::Package; 20 | use crate::package::PackageName; 21 | use crate::package::PackageVersion; 22 | use crate::package::Source; 23 | 24 | #[derive(Clone, Debug)] 25 | pub struct SourceCache { 26 | root: PathBuf, 27 | } 28 | 29 | impl SourceCache { 30 | pub fn new(root: PathBuf) -> Self { 31 | SourceCache { root } 32 | } 33 | 34 | pub fn sources_for(&self, p: &Package) -> Vec { 35 | SourceEntry::for_package(self.root.clone(), p) 36 | } 37 | } 38 | 39 | #[derive(Debug)] 40 | pub struct SourceEntry { 41 | cache_root: PathBuf, 42 | package_name: PackageName, 43 | package_version: 
PackageVersion, 44 | package_source_name: String, 45 | package_source: Source, 46 | } 47 | 48 | impl SourceEntry { 49 | fn source_file_directory(&self) -> PathBuf { 50 | self.cache_root 51 | .join(format!("{}-{}", self.package_name, self.package_version)) 52 | } 53 | 54 | fn for_package(cache_root: PathBuf, package: &Package) -> Vec { 55 | package 56 | .sources() 57 | .clone() 58 | .into_iter() 59 | .map(|(source_name, source)| SourceEntry { 60 | cache_root: cache_root.clone(), 61 | package_name: package.name().clone(), 62 | package_version: package.version().clone(), 63 | package_source_name: source_name, 64 | package_source: source, 65 | }) 66 | .collect() 67 | } 68 | 69 | pub fn path(&self) -> PathBuf { 70 | self.source_file_directory().join({ 71 | (self.package_source_name.as_ref() as &std::path::Path).with_extension("source") 72 | }) 73 | } 74 | 75 | pub fn url(&self) -> &Url { 76 | self.package_source.url() 77 | } 78 | 79 | pub fn download_manually(&self) -> bool { 80 | *self.package_source.download_manually() 81 | } 82 | 83 | pub async fn remove_file(&self) -> Result<()> { 84 | let p = self.path(); 85 | tokio::fs::remove_file(&p).await?; 86 | Ok(()) 87 | } 88 | 89 | pub async fn verify_hash(&self) -> Result<()> { 90 | let p = self.path(); 91 | trace!("Verifying : {}", p.display()); 92 | 93 | let reader = tokio::fs::OpenOptions::new() 94 | .create(false) 95 | .create_new(false) 96 | .read(true) 97 | .open(&p) 98 | .await 99 | .map(tokio::io::BufReader::new) 100 | .context("Opening file failed")?; 101 | 102 | trace!("Reader constructed for path: {}", p.display()); 103 | self.package_source.hash().matches_hash_of(reader).await 104 | } 105 | 106 | pub async fn create(&self) -> Result { 107 | let p = self.path(); 108 | trace!("Creating source file: {}", p.display()); 109 | 110 | if !self.cache_root.is_dir() { 111 | trace!("Cache root does not exist: {}", self.cache_root.display()); 112 | anyhow::bail!("Cache root {} does not exist!", self.cache_root.display()); 
113 | } 114 | 115 | { 116 | let dir = self.source_file_directory(); 117 | if !dir.is_dir() { 118 | trace!("Creating directory: {}", dir.display()); 119 | tokio::fs::create_dir_all(&dir).await.with_context(|| { 120 | anyhow!( 121 | "Creating source cache directory for package {} {}: {}", 122 | self.package_source_name, 123 | self.package_source.hash().value(), 124 | dir.display() 125 | ) 126 | })?; 127 | } else { 128 | trace!("Directory exists: {}", dir.display()); 129 | } 130 | } 131 | 132 | trace!("Creating file now: {}", p.display()); 133 | tokio::fs::OpenOptions::new() 134 | .create_new(true) 135 | .write(true) 136 | .open(&p) 137 | .await 138 | .with_context(|| anyhow!("Creating file: {}", p.display())) 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/commands/tree_of.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! 
Implementation of the 'tree-of' subcommand 12 | 13 | use anyhow::Error; 14 | use anyhow::Result; 15 | use clap::ArgMatches; 16 | use petgraph::dot::Dot; 17 | use petgraph::visit::EdgeRef; 18 | use resiter::AndThen; 19 | 20 | use crate::config::Configuration; 21 | use crate::package::condition::ConditionData; 22 | use crate::package::Dag; 23 | use crate::package::DependencyType; 24 | use crate::package::PackageName; 25 | use crate::package::PackageVersionConstraint; 26 | use crate::repository::Repository; 27 | use crate::util::docker::ImageNameLookup; 28 | use crate::util::EnvironmentVariableName; 29 | 30 | /// Implementation of the "tree_of" subcommand 31 | pub async fn tree_of(matches: &ArgMatches, repo: Repository, config: &Configuration) -> Result<()> { 32 | let pname = matches 33 | .get_one::("package_name") 34 | .map(|s| s.to_owned()) 35 | .map(PackageName::from); 36 | let pvers = matches 37 | .get_one::("package_version_constraint") 38 | .map(|s| s.to_owned()) 39 | .map(PackageVersionConstraint::try_from) 40 | .transpose()?; 41 | 42 | let image_name_lookup = ImageNameLookup::create(config.docker().images())?; 43 | let image_name = matches 44 | .get_one::("image") 45 | .map(|s| image_name_lookup.expand(s)) 46 | .transpose()?; 47 | 48 | let additional_env = matches 49 | .get_many::("env") 50 | .unwrap_or_default() 51 | .map(AsRef::as_ref) 52 | .map(crate::util::env::parse_to_env) 53 | .collect::>>()?; 54 | 55 | let condition_data = ConditionData { 56 | image_name: image_name.as_ref(), 57 | env: &additional_env, 58 | }; 59 | 60 | let dot = matches.get_flag("dot"); 61 | 62 | let serial_buildorder = matches.get_flag("serial-buildorder"); 63 | 64 | repo.packages() 65 | .filter(|p| pname.as_ref().map(|n| p.name() == n).unwrap_or(true)) 66 | .filter(|p| { 67 | pvers 68 | .as_ref() 69 | .map(|v| v.matches(p.version())) 70 | .unwrap_or(true) 71 | }) 72 | .map(|package| Dag::for_root_package(package.clone(), &repo, None, &condition_data)) 73 | .and_then_ok(|dag| { 74 | 
if dot { 75 | let dot = Dot::with_attr_getters( 76 | dag.dag(), 77 | &[ 78 | petgraph::dot::Config::EdgeNoLabel, 79 | petgraph::dot::Config::NodeNoLabel, 80 | ], 81 | &|_, er| { 82 | format!( 83 | "{} ", 84 | match er.weight() { 85 | DependencyType::Build => "style = \"dotted\"", 86 | DependencyType::Runtime => "", 87 | } 88 | ) 89 | }, 90 | &|_, node| format!("label = \"{}\" ", node.1.display_name_version()), 91 | ); 92 | 93 | println!("{dot:?}"); 94 | Ok(()) 95 | } else if serial_buildorder { 96 | let topo_sorted = petgraph::algo::toposort(dag.dag(), None) 97 | .map_err(|_| Error::msg("Cyclic dependency found!"))?; 98 | 99 | for node in topo_sorted.iter().rev() { 100 | let package = dag.dag().node_weight(*node).unwrap(); 101 | 102 | // Check incoming edges to determine if it's a build dependency 103 | let is_build_dependency = dag 104 | .dag() 105 | .edges_directed(*node, petgraph::Direction::Incoming) 106 | .any(|edge| { 107 | let dep_type = dag.dag().edge_weight(edge.id()).unwrap(); 108 | *dep_type == DependencyType::Build 109 | }); 110 | 111 | println!( 112 | "{}{}", 113 | if is_build_dependency { "*" } else { "" }, 114 | package.display_name_version() 115 | ); 116 | } 117 | println!(); 118 | 119 | Ok(()) 120 | } else { 121 | let stdout = std::io::stdout(); 122 | let mut outlock = stdout.lock(); 123 | 124 | ptree::write_tree(&dag.display(), &mut outlock).map_err(Error::from) 125 | } 126 | }) 127 | .collect::>() 128 | } 129 | -------------------------------------------------------------------------------- /src/job/runnable.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use 
anyhow::anyhow; 12 | use anyhow::Context; 13 | use anyhow::Result; 14 | use getset::Getters; 15 | use tracing::{debug, trace}; 16 | use uuid::Uuid; 17 | 18 | use crate::config::Configuration; 19 | use crate::filestore::ArtifactPath; 20 | use crate::job::Job; 21 | use crate::job::JobResource; 22 | use crate::package::Package; 23 | use crate::package::Script; 24 | use crate::package::ScriptBuilder; 25 | use crate::source::SourceCache; 26 | use crate::source::SourceEntry; 27 | use crate::util::docker::ImageName; 28 | use crate::util::EnvironmentVariableName; 29 | 30 | /// A job configuration that can be run. All inputs are clear here. 31 | #[derive(Clone, Debug, Getters)] 32 | pub struct RunnableJob { 33 | #[getset(get = "pub")] 34 | uuid: Uuid, 35 | 36 | #[getset(get = "pub")] 37 | package: Package, 38 | 39 | #[getset(get = "pub")] 40 | image: ImageName, 41 | 42 | #[getset(get = "pub")] 43 | source_cache: SourceCache, 44 | 45 | #[getset(get = "pub")] 46 | script: Script, 47 | 48 | #[getset(get = "pub")] 49 | resources: Vec, 50 | } 51 | 52 | impl RunnableJob { 53 | pub fn build_from_job( 54 | job: &Job, 55 | source_cache: &SourceCache, 56 | config: &Configuration, 57 | git_author_env: Option<&(EnvironmentVariableName, String)>, 58 | git_commit_env: Option<&(EnvironmentVariableName, String)>, 59 | dependencies: Vec, 60 | ) -> Result { 61 | if config.containers().check_env_names() { 62 | debug!("Checking environment if all variables are allowed!"); 63 | job.resources() 64 | .iter() 65 | .filter_map(|r| r.env()) 66 | .chain({ 67 | job.package() 68 | .environment() 69 | .as_ref() 70 | .map(|hm| hm.iter()) 71 | .into_iter() 72 | .flatten() 73 | }) 74 | .chain(git_author_env.as_ref().into_iter().map(|(k, v)| (k, v))) 75 | .chain(git_commit_env.as_ref().into_iter().map(|(k, v)| (k, v))) 76 | .inspect(|(name, _)| debug!("Checking: {}", name)) 77 | .try_for_each(|(name, _)| { 78 | trace!( 79 | "{:?} contains? 
{:?}", 80 | config.containers().allowed_env(), 81 | name 82 | ); 83 | if !config.containers().allowed_env().contains(name) { 84 | Err(anyhow!("Environment variable name not allowed: {name}")) 85 | } else { 86 | Ok(()) 87 | } 88 | }) 89 | .with_context(|| { 90 | anyhow!( 91 | "Checking allowed variables for package {} {}", 92 | job.package().name(), 93 | job.package().version() 94 | ) 95 | }) 96 | .context("Checking allowed variable names")?; 97 | } else { 98 | debug!("Environment checking disabled"); 99 | } 100 | 101 | let resources = dependencies 102 | .into_iter() 103 | .map(JobResource::from) 104 | .chain({ 105 | job.resources() 106 | .iter() 107 | .filter(|jr| jr.env().is_some()) 108 | .cloned() 109 | }) 110 | .chain(git_author_env.into_iter().cloned().map(JobResource::from)) 111 | .chain(git_commit_env.into_iter().cloned().map(JobResource::from)) 112 | .collect(); 113 | 114 | debug!("Building script now"); 115 | let script = ScriptBuilder::new(job.script_shebang()).build( 116 | job.package(), 117 | job.script_phases(), 118 | *config.strict_script_interpolation(), 119 | )?; 120 | 121 | Ok(RunnableJob { 122 | uuid: *job.uuid(), 123 | package: job.package().clone(), 124 | image: job.image().clone(), 125 | resources, 126 | source_cache: source_cache.clone(), 127 | 128 | script, 129 | }) 130 | } 131 | 132 | pub fn package_sources(&self) -> Vec { 133 | self.source_cache.sources_for(self.package()) 134 | } 135 | 136 | pub fn environment(&self) -> impl Iterator { 137 | self.resources.iter().filter_map(|r| r.env()).chain({ 138 | self.package() 139 | .environment() 140 | .as_ref() 141 | .map(|hm| hm.iter()) 142 | .into_iter() 143 | .flatten() 144 | }) 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /src/commands/metrics.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and 
the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | //! Implementation of the 'metrics' subcommand 12 | 13 | use std::io::Write; 14 | use std::path::Path; 15 | 16 | use anyhow::Error; 17 | use anyhow::Result; 18 | use diesel::r2d2::ConnectionManager; 19 | use diesel::r2d2::Pool; 20 | use diesel::PgConnection; 21 | use diesel::QueryDsl; 22 | use diesel::RunQueryDsl; 23 | use walkdir::WalkDir; 24 | 25 | use crate::config::Configuration; 26 | use crate::repository::Repository; 27 | 28 | pub async fn metrics( 29 | repo_path: &Path, 30 | config: &Configuration, 31 | repo: Repository, 32 | pool: Pool>, 33 | ) -> Result<()> { 34 | let mut out = std::io::stdout(); 35 | 36 | let nfiles = WalkDir::new(repo_path) 37 | .follow_links(true) 38 | .into_iter() 39 | .filter_map(Result::ok) 40 | .filter(|d| d.file_type().is_file()) 41 | .filter(|f| { 42 | f.path() 43 | .file_name() 44 | .map(|name| name == "pkg.toml") 45 | .unwrap_or(false) 46 | }) 47 | .count(); 48 | 49 | let n_artifacts = async { 50 | crate::schema::artifacts::table 51 | .count() 52 | .get_result::(&mut pool.get().unwrap()) 53 | }; 54 | let n_endpoints = async { 55 | crate::schema::endpoints::table 56 | .count() 57 | .get_result::(&mut pool.get().unwrap()) 58 | }; 59 | let n_envvars = async { 60 | crate::schema::envvars::table 61 | .count() 62 | .get_result::(&mut pool.get().unwrap()) 63 | }; 64 | let n_githashes = async { 65 | crate::schema::githashes::table 66 | .count() 67 | .get_result::(&mut pool.get().unwrap()) 68 | }; 69 | let n_images = async { 70 | crate::schema::images::table 71 | .count() 72 | .get_result::(&mut pool.get().unwrap()) 73 | }; 74 | let n_jobs = async { 75 | crate::schema::jobs::table 76 | .count() 77 | .get_result::(&mut pool.get().unwrap()) 78 | }; 79 | let n_packages = async { 80 | 
crate::schema::packages::table 81 | .count() 82 | .get_result::(&mut pool.get().unwrap()) 83 | }; 84 | let n_releasestores = async { 85 | crate::schema::release_stores::table 86 | .count() 87 | .get_result::(&mut pool.get().unwrap()) 88 | }; 89 | let n_releases = async { 90 | crate::schema::releases::table 91 | .count() 92 | .get_result::(&mut pool.get().unwrap()) 93 | }; 94 | let n_submits = async { 95 | crate::schema::submits::table 96 | .count() 97 | .get_result::(&mut pool.get().unwrap()) 98 | }; 99 | 100 | let ( 101 | n_artifacts, 102 | n_endpoints, 103 | n_envvars, 104 | n_githashes, 105 | n_images, 106 | n_jobs, 107 | n_packages, 108 | n_releasestores, 109 | n_releases, 110 | n_submits, 111 | ) = tokio::try_join!( 112 | n_artifacts, 113 | n_endpoints, 114 | n_envvars, 115 | n_githashes, 116 | n_images, 117 | n_jobs, 118 | n_packages, 119 | n_releasestores, 120 | n_releases, 121 | n_submits 122 | )?; 123 | 124 | write!( 125 | out, 126 | "{}", 127 | indoc::formatdoc!( 128 | r#" 129 | Butido release {release} 130 | 131 | Configuration: 132 | - {configured_endpoints} endpoints 133 | - {configured_images} images 134 | - {configured_release_stores} release stores 135 | - {configured_phases} phases 136 | 137 | Repository: 138 | - {nfiles} files 139 | - {repo_packages} packages 140 | 141 | Database: 142 | - {n_artifacts} artifacts 143 | - {n_endpoints} endpoints 144 | - {n_envvars} envvars 145 | - {n_githashes} githashes 146 | - {n_images} images 147 | - {n_jobs} jobs 148 | - {n_packages} packages 149 | - {n_releasestores} releasestores 150 | - {n_releases} releases 151 | - {n_submits} submits 152 | "#, 153 | release = clap::crate_version!(), 154 | configured_endpoints = config.docker().endpoints().len(), 155 | configured_images = config.docker().images().len(), 156 | configured_release_stores = config.release_stores().len(), 157 | configured_phases = config.available_phases().len(), 158 | nfiles = nfiles, 159 | repo_packages = repo.packages().count(), 160 | 
n_artifacts = n_artifacts, 161 | n_endpoints = n_endpoints, 162 | n_envvars = n_envvars, 163 | n_githashes = n_githashes, 164 | n_images = n_images, 165 | n_jobs = n_jobs, 166 | n_packages = n_packages, 167 | n_releasestores = n_releasestores, 168 | n_releases = n_releases, 169 | n_submits = n_submits, 170 | ) 171 | ) 172 | .map_err(Error::from) 173 | } 174 | -------------------------------------------------------------------------------- /src/package/dependency/mod.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::anyhow; 12 | use anyhow::Result; 13 | use lazy_static::lazy_static; 14 | use regex::Regex; 15 | 16 | use crate::package::PackageName; 17 | use crate::package::PackageVersion; 18 | 19 | mod build; 20 | pub use build::*; 21 | 22 | mod runtime; 23 | pub use runtime::*; 24 | 25 | pub mod condition; 26 | 27 | pub trait ParseDependency { 28 | fn parse_as_name_and_version(&self) -> Result<(PackageName, PackageVersion)>; 29 | } 30 | 31 | lazy_static! 
{ 32 | // The following regex could be simplified significantly since we basically only need the space 33 | // (" ") for splitting name and version (and both shouldn't be empty) - the rest of the 34 | // validation could and probably should be done when parsing `name` and `version` (can make the 35 | // errors more precise and we avoid that the regex diverges from the rest of the validation as 36 | // it's already the case): 37 | pub(in crate::package::dependency) static ref DEPENDENCY_PARSING_RE: Regex = 38 | Regex::new("^(?P[[:alnum:]][[:alnum:]._-]*) (?P[*=><]?[[:alnum:]][[:alnum:][:punct:]]*)$").unwrap(); 39 | } 40 | 41 | /// Helper function for the actual implementation of the ParseDependency trait. 42 | /// 43 | /// TODO: Reimplement using pom crate 44 | pub(in crate::package::dependency) fn parse_package_dependency_string_into_name_and_version( 45 | s: &str, 46 | ) -> Result<(PackageName, PackageVersion)> { 47 | let caps = crate::package::dependency::DEPENDENCY_PARSING_RE 48 | .captures(s) 49 | .ok_or_else(|| anyhow!("Could not parse into package name and package version: '{s}'"))?; 50 | 51 | let name = caps 52 | .name("name") 53 | .map(|m| String::from(m.as_str())) 54 | .ok_or_else(|| anyhow!("Could not parse name: '{s}'"))?; 55 | 56 | let vers = caps 57 | .name("version") 58 | .map(|m| String::from(m.as_str())) 59 | .ok_or_else(|| anyhow!("Could not parse version: '{s}'"))?; 60 | 61 | // TODO: This is here temporarily to keep the version validation: 62 | let _ = crate::package::PackageVersionConstraint::try_from(vers.clone()).map_err(|e| { 63 | e.context(anyhow!( 64 | "Could not parse the following package dependency string: {s}" 65 | )) 66 | })?; 67 | Ok((PackageName::from(name), PackageVersion::from(vers))) 68 | } 69 | 70 | #[cfg(test)] 71 | mod tests { 72 | use super::*; 73 | 74 | use crate::package::PackageVersion; 75 | 76 | // 77 | // helper functions 78 | // 79 | 80 | fn dep_parse_test(name: &'static str, version: &'static str) { 81 | let name = 
name.to_string(); 82 | let version = version.to_string(); 83 | 84 | let dependency_specification = format!("{name} ={version}"); 85 | let dep = Dependency::from(dependency_specification.clone()); 86 | let (dep_name, dep_version) = dep.parse_as_name_and_version().unwrap(); 87 | 88 | let version = PackageVersion::from(version); 89 | assert_eq!( 90 | dep_name, 91 | PackageName::from(name), 92 | "Name check failed for input: {dependency_specification}" 93 | ); 94 | assert_eq!( 95 | dep_version, version, 96 | "Version check failed for input: {dependency_specification}" 97 | ); 98 | } 99 | 100 | fn dep_parse_expect_err(dependency_specification: &'static str) { 101 | let dep = Dependency::from(dependency_specification.to_string()); 102 | let result = dep.parse_as_name_and_version(); 103 | assert!( 104 | result.is_err(), 105 | "Should not be able to parse this input: {dependency_specification}" 106 | ); 107 | } 108 | 109 | // 110 | // tests 111 | // 112 | 113 | #[test] 114 | fn test_dependency_conversion_1() { 115 | dep_parse_test("vim", "8.2"); 116 | } 117 | 118 | #[test] 119 | fn test_dependency_conversion_2() { 120 | dep_parse_test("gtk15", "1b"); 121 | } 122 | 123 | #[test] 124 | fn test_dependency_string_with_punctuation() { 125 | dep_parse_test("foo-bar1.2.3", "0.123"); 126 | } 127 | 128 | #[test] 129 | fn test_dependency_string_where_pkg_starts_with_number() { 130 | dep_parse_test("7z", "42"); 131 | } 132 | 133 | #[test] 134 | fn test_dependency_version_with_constraint() { 135 | let name = "foobar"; 136 | let version_constraint = "=1.42.37"; 137 | 138 | let dep = Dependency::from(format!("{name} {version_constraint}")); 139 | let (dep_name, dep_version) = dep.parse_as_name_and_version().unwrap(); 140 | 141 | assert_eq!(dep_name, PackageName::from(name.to_string())); 142 | assert_eq!( 143 | dep_version, 144 | PackageVersion::from(version_constraint.to_string()), 145 | ); 146 | } 147 | 148 | #[test] 149 | fn test_complex_dependency_parsing() { 150 | 
dep_parse_test("0ad_", "42"); 151 | dep_parse_test("2048-cli_0.0", "42"); 152 | 153 | dep_parse_expect_err("0] =42"); 154 | dep_parse_expect_err("a\\ =42"); 155 | dep_parse_expect_err("a =.0"); 156 | dep_parse_expect_err("a ="); 157 | dep_parse_expect_err(""); 158 | dep_parse_expect_err(" "); 159 | // Not supported yet: 160 | dep_parse_expect_err("a *"); 161 | dep_parse_expect_err("a >2"); 162 | dep_parse_expect_err("a <2"); 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/package/source.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2020-2022 science+computing ag and other contributors 3 | // 4 | // This program and the accompanying materials are made 5 | // available under the terms of the Eclipse Public License 2.0 6 | // which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | // 8 | // SPDX-License-Identifier: EPL-2.0 9 | // 10 | 11 | use anyhow::anyhow; 12 | use anyhow::Context; 13 | use anyhow::Result; 14 | use getset::Getters; 15 | use serde::Deserialize; 16 | use serde::Serialize; 17 | use tracing::trace; 18 | use url::Url; 19 | 20 | fn default_download_manually() -> bool { 21 | false 22 | } 23 | 24 | #[derive(Clone, Debug, Serialize, Deserialize, Getters)] 25 | pub struct Source { 26 | #[getset(get = "pub")] 27 | url: Url, 28 | #[getset(get = "pub")] 29 | hash: SourceHash, 30 | 31 | // This is only required for some special packages that cannot be downloaded automatically for 32 | // various reasons so it defaults to `false`: 33 | #[serde(default = "default_download_manually")] 34 | #[getset(get = "pub")] 35 | download_manually: bool, 36 | } 37 | 38 | impl Source { 39 | #[cfg(test)] 40 | pub fn new(url: Url, hash: SourceHash) -> Self { 41 | Source { 42 | url, 43 | hash, 44 | download_manually: false, 45 | } 46 | } 47 | } 48 | 49 | #[derive(Clone, Debug, Serialize, Deserialize, Getters)] 50 | pub struct SourceHash { 51 | 
#[serde(rename = "type")] 52 | #[getset(get = "pub")] 53 | hashtype: HashType, 54 | 55 | #[serde(rename = "hash")] 56 | #[getset(get = "pub")] 57 | value: HashValue, 58 | } 59 | 60 | impl SourceHash { 61 | pub async fn matches_hash_of(&self, reader: R) -> Result<()> { 62 | trace!("Hashing buffer with: {:?}", self.hashtype); 63 | let h = self 64 | .hashtype 65 | .hash_from_reader(reader) 66 | .await 67 | .context("Hashing failed")?; 68 | trace!("Hashing buffer with: {} finished", self.hashtype); 69 | 70 | if h == self.value { 71 | trace!("Hash matches expected hash"); 72 | Ok(()) 73 | } else { 74 | trace!("Hash mismatch expected hash"); 75 | Err(anyhow!( 76 | "Hash mismatch, expected '{}', got '{}'", 77 | self.value, 78 | h 79 | )) 80 | } 81 | } 82 | 83 | #[cfg(test)] 84 | pub fn new(hashtype: HashType, value: HashValue) -> Self { 85 | SourceHash { hashtype, value } 86 | } 87 | } 88 | 89 | #[derive(parse_display::Display, Clone, Debug, Serialize, Deserialize)] 90 | pub enum HashType { 91 | #[serde(rename = "sha1")] 92 | #[display("sha1")] 93 | Sha1, 94 | 95 | #[serde(rename = "sha256")] 96 | #[display("sha256")] 97 | Sha256, 98 | 99 | #[serde(rename = "sha512")] 100 | #[display("sha512")] 101 | Sha512, 102 | } 103 | 104 | impl HashType { 105 | async fn hash_from_reader( 106 | &self, 107 | mut reader: R, 108 | ) -> Result { 109 | use tokio::io::AsyncReadExt; 110 | 111 | let mut buffer = [0; 1024]; 112 | 113 | match self { 114 | HashType::Sha1 => { 115 | use sha1::Digest; 116 | 117 | trace!("SHA1 hashing buffer"); 118 | let mut m = sha1::Sha1::new(); 119 | loop { 120 | let count = reader 121 | .read(&mut buffer) 122 | .await 123 | .context("Reading buffer failed")?; 124 | 125 | if count == 0 { 126 | trace!("ready"); 127 | break; 128 | } 129 | 130 | m.update(&buffer[..count]); 131 | } 132 | Ok(HashValue(format!("{:x}", m.finalize()))) 133 | } 134 | HashType::Sha256 => { 135 | use sha2::Digest; 136 | 137 | trace!("SHA256 hashing buffer"); 138 | let mut m = 
sha2::Sha256::new(); 139 | loop { 140 | let count = reader 141 | .read(&mut buffer) 142 | .await 143 | .context("Reading buffer failed")?; 144 | 145 | if count == 0 { 146 | trace!("ready"); 147 | break; 148 | } 149 | 150 | m.update(&buffer[..count]); 151 | } 152 | let h = format!("{:x}", m.finalize()); 153 | trace!("Hash = {:?}", h); 154 | Ok(HashValue(h)) 155 | } 156 | HashType::Sha512 => { 157 | use sha2::Digest; 158 | 159 | trace!("SHA512 hashing buffer"); 160 | let mut m = sha2::Sha512::new(); 161 | loop { 162 | let count = reader 163 | .read(&mut buffer) 164 | .await 165 | .context("Reading buffer failed")?; 166 | 167 | if count == 0 { 168 | trace!("ready"); 169 | break; 170 | } 171 | 172 | m.update(&buffer[..count]); 173 | } 174 | let h = format!("{:x}", m.finalize()); 175 | trace!("Hash = {:?}", h); 176 | Ok(HashValue(h)) 177 | } 178 | } 179 | } 180 | } 181 | 182 | #[derive(parse_display::Display, Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)] 183 | #[serde(transparent)] 184 | #[display("{0}")] 185 | pub struct HashValue(String); 186 | 187 | #[cfg(test)] 188 | impl From for HashValue { 189 | fn from(s: String) -> Self { 190 | HashValue(s) 191 | } 192 | } 193 | --------------------------------------------------------------------------------