├── LICENSE ├── README.md ├── besadii ├── default.nix └── main.go ├── buildGo ├── .skip-subtree ├── README.md ├── default.nix ├── example │ ├── default.nix │ ├── lib.go │ └── main.go └── external │ ├── default.nix │ └── main.go ├── buildkite.yml ├── buildkite ├── default.nix └── fetch-parent-targets.sh ├── checks └── default.nix ├── default.nix ├── dependency-analyzer ├── default.nix ├── examples │ ├── ci-targets.nix │ └── lisp.nix └── tests │ └── default.nix ├── lazy-deps └── default.nix ├── magrathea ├── default.nix └── mg.scm ├── nixpkgs └── default.nix ├── readTree ├── README.md ├── default.nix └── tests │ ├── .skip-subtree │ ├── default.nix │ ├── test-example │ ├── third_party │ │ ├── default.nix │ │ └── rustpkgs │ │ │ ├── aho-corasick.nix │ │ │ └── serde.nix │ └── tools │ │ ├── cheddar │ │ └── default.nix │ │ └── roquefort.nix │ ├── test-marker │ ├── directory-marked │ │ ├── default.nix │ │ └── nested │ │ │ └── default.nix │ └── file-children │ │ ├── one.nix │ │ └── two.nix │ ├── test-tree-traversal │ ├── default-nix │ │ ├── can-be-drv │ │ │ ├── default.nix │ │ │ └── subdir │ │ │ │ └── a.nix │ │ ├── default.nix │ │ ├── no-merge │ │ │ ├── default.nix │ │ │ └── subdir │ │ │ │ └── a.nix │ │ ├── sibling.nix │ │ └── subdir │ │ │ └── a.nix │ ├── no-skip-subtree │ │ ├── a │ │ │ └── default.nix │ │ ├── b │ │ │ └── c.nix │ │ └── default.nix │ ├── skip-subtree │ │ ├── .skip-subtree │ │ ├── a │ │ │ └── default.nix │ │ ├── b │ │ │ └── c.nix │ │ └── default.nix │ └── skip-tree │ │ ├── a │ │ └── default.nix │ │ └── b │ │ ├── .skip-tree │ │ └── default.nix │ ├── test-wrong-no-dots │ └── no-dots-in-function.nix │ └── test-wrong-not-a-function │ └── not-a-function.nix ├── sources ├── default.nix └── sources.json ├── stateMonad ├── default.nix └── tests │ └── default.nix └── workspace.josh /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 Vincent Ambo 4 | Copyright (c) 2020-2023 The TVL Authors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | The TVL Kit 2 | =========== 3 | 4 | [![Build status](https://badge.buildkite.com/cd7240a881c7e77c3ed8cc040f81734623f57038563b37213d.svg?branch=canon)](https://buildkite.com/tvl/tvl-kit) 5 | 6 | This folder contains a publicly available version of the core TVL 7 | tooling, currently comprising: 8 | 9 | * `buildkite`: TVL tooling for dynamically generating Buildkite 10 | pipelines with Nix. 11 | * `buildGo`: Nix-based build system for Go. 12 | * `readTree`: Nix library to dynamically compute attribute trees 13 | corresponding to the physical layout of a repository. 14 | * `besadii`: Configurable Gerrit/Buildkite integration hook. 15 | * `magrathea`: Command-line tool for working with TVL-style monorepos. 16 | * `checks`: Collection of useful CI checks for Buildkite. 17 | 18 | It can be accessed via git by cloning it as follows: 19 | 20 | git clone https://code.tvl.fyi/depot.git:unsign:workspace=views/kit.git tvl-kit 21 | 22 | If you are looking at this within the TVL depot, you can see the 23 | [josh][] configuration in `workspace.josh`. You will find the projects 24 | at slightly different paths within the depot. 25 | 26 | [josh]: https://github.com/josh-project/josh/ 27 | -------------------------------------------------------------------------------- /besadii/default.nix: -------------------------------------------------------------------------------- 1 | # This program is used as a Gerrit hook to trigger builds on 2 | # Buildkite and perform other maintenance tasks. 3 | { depot, ... }: 4 | 5 | depot.nix.buildGo.program { 6 | name = "besadii"; 7 | srcs = [ ./main.go ]; 8 | } 9 | -------------------------------------------------------------------------------- /besadii/main.go: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2020 Google LLC. 2 | // SPDX-License-Identifier: Apache-2.0 3 | // 4 | // besadii is a small CLI tool that is invoked as a hook by various 5 | // programs to cause CI-related actions. 6 | // 7 | // It supports the following modes & operations: 8 | // 9 | // Gerrit (patchset-created & change-merged) hooks: 10 | // - Trigger Buildkite CI builds 11 | // 12 | // Buildkite (post-command) hook: 13 | // - Submit CL verification status back to Gerrit 14 | package main 15 | 16 | import ( 17 | "bytes" 18 | "encoding/json" 19 | "flag" 20 | "fmt" 21 | "io" 22 | "log/syslog" 23 | "net/http" 24 | "net/mail" 25 | "os" 26 | "os/user" 27 | "path" 28 | "regexp" 29 | "strconv" 30 | "strings" 31 | ) 32 | 33 | // Regular expression to extract change ID out of a URL 34 | var changeIdRegexp = regexp.MustCompile(`^.*/(\d+)$`) 35 | 36 | // Regular expression to check if gerritChangeName is valid. The 37 | // limitation could be what is allowed for a git branch name. For now 38 | // we want to have a stricter limitation for readability and ease of 39 | // use. 40 | var gerritChangeNameRegexp = `^[a-z0-9]+$` 41 | var gerritChangeNameCheck = regexp.MustCompile(gerritChangeNameRegexp) 42 | 43 | // besadii configuration file structure 44 | type config struct { 45 | // Required configuration for Buildkite<>Gerrit monorepo 46 | // integration.
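	//
	// As a rough illustration (not part of the upstream documentation),
	// a besadii.json matching the json tags below might look like this;
	// all values are hypothetical and the secrets are placeholders:
	//
	//   {
	//     "repository": "depot",
	//     "branch": "canon",
	//     "gerritUrl": "https://gerrit.example.com",
	//     "gerritUser": "buildkite-bot",
	//     "gerritPassword": "<http-password>",
	//     "buildkiteOrg": "example-org",
	//     "buildkiteProject": "example-pipeline",
	//     "buildkiteToken": "<api-token>"
	//   }
	//
	// The optional fields default to "Verified" (gerritLabel) and
	// "cl" (gerritChangeName) in loadConfig below.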
47 | Repository string `json:"repository"` 48 | Branch string `json:"branch"` 49 | GerritUrl string `json:"gerritUrl"` 50 | GerritUser string `json:"gerritUser"` 51 | GerritPassword string `json:"gerritPassword"` 52 | GerritLabel string `json:"gerritLabel"` 53 | BuildkiteOrg string `json:"buildkiteOrg"` 54 | BuildkiteProject string `json:"buildkiteProject"` 55 | BuildkiteToken string `json:"buildkiteToken"` 56 | GerritChangeName string `json:"gerritChangeName"` 57 | } 58 | 59 | // buildTrigger represents the information passed to besadii when it 60 | // is invoked as a Gerrit hook. 61 | // 62 | // https://gerrit.googlesource.com/plugins/hooks/+/HEAD/src/main/resources/Documentation/hooks.md 63 | type buildTrigger struct { 64 | project string 65 | ref string 66 | commit string 67 | author string 68 | email string 69 | 70 | changeId string 71 | patchset string 72 | } 73 | 74 | type Author struct { 75 | Name string `json:"name"` 76 | Email string `json:"email"` 77 | } 78 | 79 | // Build is the representation of a Buildkite build as described on 80 | // https://buildkite.com/docs/apis/rest-api/builds#create-a-build 81 | type Build struct { 82 | Commit string `json:"commit"` 83 | Branch string `json:"branch"` 84 | Author Author `json:"author"` 85 | Env map[string]string `json:"env"` 86 | } 87 | 88 | // BuildResponse is the representation of Buildkite's success response 89 | // after triggering a build. This has many fields, but we only need 90 | // one of them. 91 | type buildResponse struct { 92 | WebUrl string `json:"web_url"` 93 | } 94 | 95 | // reviewInput is a struct representing the data submitted to Gerrit 96 | // to post a review on a CL. 97 | // 98 | // https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#review-input 99 | type reviewInput struct { 100 | Message string `json:"message"` 101 | Labels map[string]int `json:"labels,omitempty"` 102 | OmitDuplicateComments bool `json:"omit_duplicate_comments"` 103 | IgnoreDefaultAttentionSetRules bool `json:"ignore_default_attention_set_rules"` 104 | Tag string `json:"tag"` 105 | Notify string `json:"notify,omitempty"` 106 | } 107 | 108 | func defaultConfigLocation() (string, error) { 109 | usr, err := user.Current() 110 | if err != nil { 111 | return "", fmt.Errorf("failed to get current user: %w", err) 112 | } 113 | 114 | return path.Join(usr.HomeDir, "besadii.json"), nil 115 | } 116 | 117 | func loadConfig() (*config, error) { 118 | configPath := os.Getenv("BESADII_CONFIG") 119 | 120 | if configPath == "" { 121 | var err error 122 | configPath, err = defaultConfigLocation() 123 | if err != nil { 124 | return nil, fmt.Errorf("failed to get config location: %w", err) 125 | } 126 | } 127 | 128 | configJson, err := os.ReadFile(configPath) 129 | if err != nil { 130 | return nil, fmt.Errorf("failed to load besadii config: %w", err) 131 | } 132 | 133 | var cfg config 134 | err = json.Unmarshal(configJson, &cfg) 135 | if err != nil { 136 | return nil, fmt.Errorf("failed to unmarshal besadii config: %w", err) 137 | } 138 | 139 | // The default Gerrit label to set is 'Verified', unless specified otherwise. 140 | if cfg.GerritLabel == "" { 141 | cfg.GerritLabel = "Verified" 142 | } 143 | 144 | // The default text referring to a Gerrit Change in BuildKite. 
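	// With the default of "cl", builds for changes are grouped under
	// Buildkite branches like "cl/<change number>". The value must match
	// gerritChangeNameRegexp (^[a-z0-9]+$), so e.g. "cl" or "change1"
	// would be accepted while "my-change" would be rejected (examples
	// are illustrative, not from the upstream documentation).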
145 | if cfg.GerritChangeName == "" { 146 | cfg.GerritChangeName = "cl" 147 | } 148 | if !gerritChangeNameCheck.MatchString(cfg.GerritChangeName) { 149 | return nil, fmt.Errorf("invalid 'gerritChangeName': %s", cfg.GerritChangeName) 150 | } 151 | 152 | if cfg.Repository == "" || cfg.Branch == "" { 153 | return nil, fmt.Errorf("missing repository configuration (required: repository, branch)") 154 | } 155 | 156 | if cfg.GerritUrl == "" || cfg.GerritUser == "" || cfg.GerritPassword == "" { 157 | return nil, fmt.Errorf("missing Gerrit configuration (required: gerritUrl, gerritUser, gerritPassword)") 158 | } 159 | 160 | if cfg.BuildkiteOrg == "" || cfg.BuildkiteProject == "" || cfg.BuildkiteToken == "" { 161 | return nil, fmt.Errorf("mising Buildkite configuration (required: buildkiteOrg, buildkiteProject, buildkiteToken)") 162 | } 163 | 164 | return &cfg, nil 165 | } 166 | 167 | // linkToChange creates the full link to a change's patchset in Gerrit 168 | func linkToChange(cfg *config, changeId, patchset string) string { 169 | return fmt.Sprintf("%s/c/%s/+/%s/%s", cfg.GerritUrl, cfg.Repository, changeId, patchset) 170 | } 171 | 172 | // updateGerrit posts a comment on a Gerrit CL to indicate the current build status. 173 | func updateGerrit(cfg *config, review reviewInput, changeId, patchset string) { 174 | body, _ := json.Marshal(review) 175 | reader := io.NopCloser(bytes.NewReader(body)) 176 | 177 | url := fmt.Sprintf("%s/a/changes/%s/revisions/%s/review", cfg.GerritUrl, changeId, patchset) 178 | req, err := http.NewRequest("POST", url, reader) 179 | if err != nil { 180 | fmt.Fprintf(os.Stderr, "failed to create an HTTP request: %s", err) 181 | os.Exit(1) 182 | } 183 | 184 | req.SetBasicAuth(cfg.GerritUser, cfg.GerritPassword) 185 | req.Header.Add("Content-Type", "application/json") 186 | 187 | resp, err := http.DefaultClient.Do(req) 188 | if err != nil { 189 | fmt.Fprintf(os.Stderr, "failed to update %s on %s: %s", cfg.GerritChangeName, cfg.GerritUrl, err) 190 | } 191 | defer resp.Body.Close() 192 | 193 | if resp.StatusCode != http.StatusOK { 194 | respBody, _ := io.ReadAll(resp.Body) 195 | fmt.Fprintf(os.Stderr, "received non-success response from Gerrit: %s (%v)", respBody, resp.Status) 196 | } else { 197 | fmt.Printf("Added CI status comment on %s", linkToChange(cfg, changeId, patchset)) 198 | } 199 | } 200 | 201 | // Trigger a build of a given branch & commit on Buildkite 202 | func triggerBuild(cfg *config, log *syslog.Writer, trigger *buildTrigger) error { 203 | env := make(map[string]string) 204 | branch := trigger.ref 205 | 206 | // Pass information about the originating Gerrit change to the 207 | // build, if it is for a patchset. 208 | // 209 | // This information is later used by besadii when invoked by Gerrit 210 | // to communicate the build status back to Gerrit. 211 | headBuild := true 212 | if trigger.changeId != "" && trigger.patchset != "" { 213 | env["GERRIT_CHANGE_URL"] = linkToChange(cfg, trigger.changeId, trigger.patchset) 214 | env["GERRIT_CHANGE_ID"] = trigger.changeId 215 | env["GERRIT_PATCHSET"] = trigger.patchset 216 | headBuild = false 217 | 218 | // The branch doesn't have to be a real ref (it's just used to 219 | // group builds), so make it the identifier for the CL. 
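		//
		// As a worked example (the change number is hypothetical): a
		// patchset ref such as "refs/changes/44/6744/3" splits on "/"
		// into ["refs" "changes" "44" "6744" "3"], element 3 is the
		// change number, and the resulting branch becomes "cl/6744"
		// with the default gerritChangeName.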
220 | branch = fmt.Sprintf("%s/%v", cfg.GerritChangeName, strings.Split(trigger.ref, "/")[3]) 221 | } 222 | 223 | build := Build{ 224 | Commit: trigger.commit, 225 | Branch: branch, 226 | Env: env, 227 | Author: Author{ 228 | Name: trigger.author, 229 | Email: trigger.email, 230 | }, 231 | } 232 | 233 | body, _ := json.Marshal(build) 234 | reader := io.NopCloser(bytes.NewReader(body)) 235 | 236 | bkUrl := fmt.Sprintf("https://api.buildkite.com/v2/organizations/%s/pipelines/%s/builds", cfg.BuildkiteOrg, cfg.BuildkiteProject) 237 | req, err := http.NewRequest("POST", bkUrl, reader) 238 | if err != nil { 239 | return fmt.Errorf("failed to create an HTTP request: %w", err) 240 | } 241 | 242 | req.Header.Add("Authorization", "Bearer "+cfg.BuildkiteToken) 243 | req.Header.Add("Content-Type", "application/json") 244 | 245 | resp, err := http.DefaultClient.Do(req) 246 | if err != nil { 247 | // This might indicate a temporary error on the Buildkite side. 248 | return fmt.Errorf("failed to send Buildkite request: %w", err) 249 | } 250 | defer resp.Body.Close() 251 | 252 | respBody, err := io.ReadAll(resp.Body) 253 | if err != nil { 254 | return fmt.Errorf("failed to read Buildkite response body: %w", err) 255 | } 256 | 257 | if resp.StatusCode != http.StatusCreated { 258 | return fmt.Errorf("received non-success response from Buildkite: %s (%v)", respBody, resp.Status) 259 | } 260 | 261 | var buildResp buildResponse 262 | err = json.Unmarshal(respBody, &buildResp) 263 | if err != nil { 264 | return fmt.Errorf("failed to unmarshal build response: %w", err) 265 | } 266 | 267 | fmt.Fprintf(log, "triggered build for ref %q at commit %q: %s", trigger.ref, trigger.commit, buildResp.WebUrl) 268 | 269 | // For builds of the HEAD branch there is nothing else to do 270 | if headBuild { 271 | return nil 272 | } 273 | 274 | // Report the status back to the Gerrit CL so that users can click 275 | // through to the running build. 276 | msg := fmt.Sprintf("Started build for patchset #%s on: %s", trigger.patchset, buildResp.WebUrl) 277 | review := reviewInput{ 278 | Message: msg, 279 | OmitDuplicateComments: true, 280 | Tag: "autogenerated:buildkite~trigger", 281 | 282 | // Do not update the attention set for this comment. 283 | IgnoreDefaultAttentionSetRules: true, 284 | 285 | Notify: "NONE", 286 | } 287 | updateGerrit(cfg, review, trigger.changeId, trigger.patchset) 288 | 289 | return nil 290 | } 291 | 292 | // Gerrit passes more flags than we want, but Rob Pike decided[0] in 293 | // 2013 that the Go art project will not allow users to ignore flags 294 | // because he "doesn't like it". This function allows users to ignore 295 | // flags. 296 | // 297 | // [0]: https://github.com/golang/go/issues/6112#issuecomment-66083768 298 | func ignoreFlags(ignore []string) { 299 | for _, f := range ignore { 300 | flag.String(f, "", "flag to ignore") 301 | } 302 | } 303 | 304 | // Extract the username & email from Gerrit's uploader flag and set it 305 | // on the trigger struct, for display in Buildkite. 306 | func extractChangeUploader(uploader string, trigger *buildTrigger) error { 307 | // Gerrit passes the uploader in another extra layer of quotes. 308 | uploader, err := strconv.Unquote(uploader) 309 | if err != nil { 310 | return fmt.Errorf("failed to unquote email - forgot quotes on manual invocation?: %w", err) 311 | } 312 | 313 | // Extract the uploader username & email from the input passed by 314 | // Gerrit (in RFC 5322 format). 
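	//
	// As an illustrative example (hypothetical person): the raw flag
	// value arrives as the literal string "Jane Doe <jane@example.com>"
	// including the surrounding quotes; strconv.Unquote above strips
	// them, and mail.ParseAddress then yields Name "Jane Doe" and
	// Address "jane@example.com".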
315 | addr, err := mail.ParseAddress(uploader) 316 | if err != nil { 317 | return fmt.Errorf("invalid change uploader (%s): %w", uploader, err) 318 | } 319 | 320 | trigger.author = addr.Name 321 | trigger.email = addr.Address 322 | 323 | return nil 324 | } 325 | 326 | // Extract the buildtrigger struct out of the flags passed to besadii 327 | // when invoked as Gerrit's 'patchset-created' hook. This hook is used 328 | // for triggering CI on in-progress CLs. 329 | func buildTriggerFromPatchsetCreated(cfg *config) (*buildTrigger, error) { 330 | // Information that needs to be returned 331 | var trigger buildTrigger 332 | 333 | // Information that is only needed for parsing 334 | var targetBranch, changeUrl, uploader, kind string 335 | 336 | flag.StringVar(&trigger.project, "project", "", "Gerrit project") 337 | flag.StringVar(&trigger.commit, "commit", "", "commit hash") 338 | flag.StringVar(&trigger.patchset, "patchset", "", "patchset ID") 339 | 340 | flag.StringVar(&targetBranch, "branch", "", "CL target branch") 341 | flag.StringVar(&changeUrl, "change-url", "", "HTTPS URL of change") 342 | flag.StringVar(&uploader, "uploader", "", "Change uploader name & email") 343 | flag.StringVar(&kind, "kind", "", "Kind of patchset") 344 | 345 | // patchset-created also passes various flags which we don't need. 346 | ignoreFlags([]string{"topic", "change", "uploader-username", "change-owner", "change-owner-username"}) 347 | 348 | flag.Parse() 349 | 350 | // Ignore patchsets which do not contain code changes 351 | if kind == "NO_CODE_CHANGE" || kind == "NO_CHANGE" { 352 | return nil, nil 353 | } 354 | 355 | // Parse username & email 356 | err := extractChangeUploader(uploader, &trigger) 357 | if err != nil { 358 | return nil, err 359 | } 360 | 361 | // If the patchset is not for the HEAD branch of the monorepo, then 362 | // we can ignore it. It might be some other kind of change 363 | // (refs/meta/config or Gerrit-internal), but it is not an error. 364 | if trigger.project != cfg.Repository || targetBranch != cfg.Branch { 365 | return nil, nil 366 | } 367 | 368 | // Change ID is not directly passed in the numeric format, so we 369 | // need to extract it out of the URL 370 | matches := changeIdRegexp.FindStringSubmatch(changeUrl) 371 | trigger.changeId = matches[1] 372 | 373 | // Construct the CL ref from which the build should happen. 374 | changeId, _ := strconv.Atoi(trigger.changeId) 375 | trigger.ref = fmt.Sprintf( 376 | "refs/changes/%02d/%s/%s", 377 | changeId%100, trigger.changeId, trigger.patchset, 378 | ) 379 | 380 | return &trigger, nil 381 | } 382 | 383 | // Extract the buildtrigger struct out of the flags passed to besadii 384 | // when invoked as Gerrit's 'change-merged' hook. This hook is used 385 | // for triggering HEAD builds after change submission. 
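//
// A rough sketch of how Gerrit might invoke this hook (flag values
// are hypothetical, and Gerrit additionally passes flags which are
// dropped via ignoreFlags below):
//
//	change-merged --project depot --branch canon \
//	  --commit <sha> --submitter '"Jane Doe <jane@example.com>"'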
386 | func buildTriggerFromChangeMerged(cfg *config) (*buildTrigger, error) { 387 | // Information that needs to be returned 388 | var trigger buildTrigger 389 | 390 | // Information that is only needed for parsing 391 | var targetBranch, submitter string 392 | 393 | flag.StringVar(&trigger.project, "project", "", "Gerrit project") 394 | flag.StringVar(&trigger.commit, "commit", "", "Commit hash") 395 | flag.StringVar(&submitter, "submitter", "", "Submitter email & username") 396 | flag.StringVar(&targetBranch, "branch", "", "CL target branch") 397 | 398 | // Ignore extra flags passed by change-merged 399 | ignoreFlags([]string{"change", "topic", "change-url", "submitter-username", "newrev", "change-owner", "change-owner-username"}) 400 | 401 | flag.Parse() 402 | 403 | // Parse username & email 404 | err := extractChangeUploader(submitter, &trigger) 405 | if err != nil { 406 | return nil, err 407 | } 408 | 409 | // If the patchset is not for the HEAD branch of the monorepo, then 410 | // we can ignore it. 411 | if trigger.project != cfg.Repository || targetBranch != cfg.Branch { 412 | return nil, nil 413 | } 414 | 415 | trigger.ref = "refs/heads/" + targetBranch 416 | 417 | return &trigger, nil 418 | } 419 | 420 | func gerritHookMain(cfg *config, log *syslog.Writer, trigger *buildTrigger) { 421 | if trigger == nil { 422 | // The hook was not for something we care about. 423 | os.Exit(0) 424 | } 425 | 426 | err := triggerBuild(cfg, log, trigger) 427 | 428 | if err != nil { 429 | log.Err(fmt.Sprintf("failed to trigger Buildkite build: %s", err)) 430 | } 431 | } 432 | 433 | func postCommandMain(cfg *config) { 434 | changeId := os.Getenv("GERRIT_CHANGE_ID") 435 | patchset := os.Getenv("GERRIT_PATCHSET") 436 | 437 | if changeId == "" || patchset == "" { 438 | // If these variables are unset, but the hook was invoked, the 439 | // build was most likely for a branch and not for a CL - no status 440 | // needs to be reported back to Gerrit! 441 | fmt.Printf("This isn't a %s build, nothing to do. Have a nice day!\n", cfg.GerritChangeName) 442 | return 443 | } 444 | 445 | if os.Getenv("BUILDKITE_LABEL") != ":duck:" { 446 | // this is not the build stage, don't do anything. 447 | return 448 | } 449 | 450 | var vote int 451 | var verb string 452 | var notify string 453 | 454 | if os.Getenv("BUILDKITE_COMMAND_EXIT_STATUS") == "0" { 455 | vote = 1 // automation passed: +1 in Gerrit 456 | verb = "passed" 457 | notify = "NONE" 458 | } else { 459 | vote = -1 460 | verb = "failed" 461 | notify = "OWNER" 462 | } 463 | 464 | msg := fmt.Sprintf("Build of patchset %s %s: %s", patchset, verb, os.Getenv("BUILDKITE_BUILD_URL")) 465 | review := reviewInput{ 466 | Message: msg, 467 | OmitDuplicateComments: true, 468 | Labels: map[string]int{ 469 | cfg.GerritLabel: vote, 470 | }, 471 | 472 | // Update the attention set if we are failing this patchset. 473 | IgnoreDefaultAttentionSetRules: vote == 1, 474 | 475 | Tag: "autogenerated:buildkite~result", 476 | 477 | Notify: notify, 478 | } 479 | updateGerrit(cfg, review, changeId, patchset) 480 | } 481 | 482 | func main() { 483 | // Logging happens in syslog because it's almost impossible to get 484 | // output out of Gerrit hooks otherwise. 
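	//
	// Note that besadii dispatches purely on the name it was invoked
	// under (os.Args[0], see below), so a typical deployment symlinks
	// the one binary into Gerrit's hooks directory as "patchset-created"
	// and "change-merged", and into the Buildkite agent hooks as
	// "post-command" (the deployment layout is an assumption, not
	// defined in this repository).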
485 | log, err := syslog.New(syslog.LOG_INFO|syslog.LOG_USER, "besadii") 486 | if err != nil { 487 | fmt.Fprintf(os.Stderr, "failed to open syslog: %s\n", err) 488 | os.Exit(1) 489 | } 490 | 491 | log.Info(fmt.Sprintf("besadii called with arguments: %v", os.Args)) 492 | 493 | bin := path.Base(os.Args[0]) 494 | cfg, err := loadConfig() 495 | 496 | if err != nil { 497 | log.Crit(fmt.Sprintf("besadii configuration error: %v", err)) 498 | os.Exit(4) 499 | } 500 | 501 | if bin == "patchset-created" { 502 | trigger, err := buildTriggerFromPatchsetCreated(cfg) 503 | if err != nil { 504 | log.Crit(fmt.Sprintf("failed to parse 'patchset-created' invocation from args: %v", err)) 505 | os.Exit(1) 506 | } 507 | gerritHookMain(cfg, log, trigger) 508 | } else if bin == "change-merged" { 509 | trigger, err := buildTriggerFromChangeMerged(cfg) 510 | if err != nil { 511 | log.Crit(fmt.Sprintf("failed to parse 'change-merged' invocation from args: %v", err)) 512 | os.Exit(1) 513 | } 514 | gerritHookMain(cfg, log, trigger) 515 | } else if bin == "post-command" { 516 | postCommandMain(cfg) 517 | } else { 518 | fmt.Fprintf(os.Stderr, "besadii does not know how to be invoked as %q, sorry!", bin) 519 | os.Exit(1) 520 | } 521 | } 522 | -------------------------------------------------------------------------------- /buildGo/.skip-subtree: -------------------------------------------------------------------------------- 1 | Subdirectories of this folder should not be imported since they are 2 | internal to buildGo.nix and incompatible with readTree. 3 | -------------------------------------------------------------------------------- /buildGo/README.md: -------------------------------------------------------------------------------- 1 | buildGo.nix 2 | =========== 3 | 4 | This is an alternative [Nix][] build system for [Go][]. It supports building Go 5 | libraries and programs. 6 | 7 | *Note:* This will probably end up being folded into [Nixery][]. 8 | 9 | ## Background 10 | 11 | Most language-specific Nix tooling outsources the build to existing 12 | language-specific build tooling, which essentially means that Nix ends up being 13 | a wrapper around all sorts of external build systems. 14 | 15 | However, systems like [Bazel][] take an alternative approach in which the 16 | compiler is invoked directly and the composition of programs and libraries stays 17 | within a single homogeneous build system. 18 | 19 | Users don't need to learn per-language build systems and especially for 20 | companies with large monorepo-setups ([like Google][]) this has huge 21 | productivity impact. 22 | 23 | This project is an attempt to prove that Nix can be used in a similar style to 24 | build software directly, rather than shelling out to other build systems. 25 | 26 | ## Example 27 | 28 | Given a program layout like this: 29 | 30 | ``` 31 | . 
32 | ├── lib <-- some library component 33 | │   ├── bar.go 34 | │   └── foo.go 35 | ├── main.go <-- program implementation 36 | └── default.nix <-- build instructions 37 | ``` 38 | 39 | The contents of `default.nix` could look like this: 40 | 41 | ```nix 42 | { buildGo }: 43 | 44 | let 45 | lib = buildGo.package { 46 | name = "somelib"; 47 | srcs = [ 48 | ./lib/bar.go 49 | ./lib/foo.go 50 | ]; 51 | }; 52 | in buildGo.program { 53 | name = "my-program"; 54 | deps = [ lib ]; 55 | 56 | srcs = [ 57 | ./main.go 58 | ]; 59 | } 60 | ``` 61 | 62 | (If you don't know how to read Nix, check out [nix-1p][]) 63 | 64 | ## Usage 65 | 66 | `buildGo` exposes five different functions: 67 | 68 | * `buildGo.program`: Build a Go binary out of the specified source files. 69 | 70 | | parameter | type | use | required? | 71 | |-----------|-------------------------|------------------------------------------------|-----------| 72 | | `name` | `string` | Name of the program (and resulting executable) | yes | 73 | | `srcs` | `list` | List of paths to source files | yes | 74 | | `deps` | `list` | List of dependencies (i.e. other Go libraries) | no | 75 | | `x_defs` | `attrs` | Attribute set of linker vars (i.e. `-X`-flags) | no | 76 | 77 | * `buildGo.package`: Build a Go library out of the specified source files. 78 | 79 | | parameter | type | use | required? | 80 | |-----------|--------------|------------------------------------------------|-----------| 81 | | `name` | `string` | Name of the library | yes | 82 | | `srcs` | `list` | List of paths to source files | yes | 83 | | `deps` | `list` | List of dependencies (i.e. other Go libraries) | no | 84 | | `path` | `string` | Go import path for the resulting library | no | 85 | 86 | * `buildGo.external`: Build an externally defined Go library or program. 87 | 88 | This function performs analysis on the supplied source code (which 89 | can use the standard Go tooling layout) and creates a tree of all 90 | the packages contained within. 91 | 92 | This exists for compatibility with external libraries that were not 93 | defined using buildGo. 94 | 95 | | parameter | type | use | required? | 96 | |-----------|----------------|-----------------------------------------------|-----------| 97 | | `path` | `string` | Go import path for the resulting package | yes | 98 | | `src` | `path` | Path to the source **directory** | yes | 99 | | `deps` | `list` | List of dependencies (i.e. other Go packages) | no | 100 | 101 | ## Current status 102 | 103 | This project is work-in-progress. Crucially it is lacking the following features: 104 | 105 | * feature flag parity with Bazel's Go rules 106 | * documentation building 107 | * test execution 108 | 109 | There are still some open questions around how to structure some of those 110 | features in Nix. 111 | 112 | [Nix]: https://nixos.org/nix/ 113 | [Go]: https://golang.org/ 114 | [Nixery]: https://github.com/google/nixery 115 | [Bazel]: https://bazel.build/ 116 | [like Google]: https://ai.google/research/pubs/pub45424 117 | [nix-1p]: https://github.com/tazjin/nix-1p 118 | -------------------------------------------------------------------------------- /buildGo/default.nix: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC. 2 | # SPDX-License-Identifier: Apache-2.0 3 | # 4 | # buildGo provides Nix functions to build Go packages in the style of Bazel's 5 | # rules_go. 6 | 7 | { pkgs ? import { } 8 | , ... 
9 | }: 10 | 11 | let 12 | inherit (builtins) 13 | attrNames 14 | baseNameOf 15 | dirOf 16 | elemAt 17 | filter 18 | listToAttrs 19 | map 20 | match 21 | readDir 22 | replaceStrings 23 | toString; 24 | 25 | inherit (pkgs) lib runCommand fetchFromGitHub protobuf symlinkJoin go; 26 | goStdlib = buildStdlib go; 27 | 28 | # Helpers for low-level Go compiler invocations 29 | spaceOut = lib.concatStringsSep " "; 30 | 31 | includeDepSrc = dep: "-I ${dep}"; 32 | includeSources = deps: spaceOut (map includeDepSrc deps); 33 | 34 | includeDepLib = dep: "-L ${dep}"; 35 | includeLibs = deps: spaceOut (map includeDepLib deps); 36 | 37 | srcBasename = src: elemAt (match "([a-z0-9]{32}\-)?(.*\.go)" (baseNameOf src)) 1; 38 | srcCopy = path: src: "cp ${src} $out/${path}/${srcBasename src}"; 39 | srcList = path: srcs: lib.concatStringsSep "\n" (map (srcCopy path) srcs); 40 | 41 | allDeps = deps: lib.unique (lib.flatten (deps ++ (map (d: d.goDeps) deps))); 42 | 43 | xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs)); 44 | 45 | # Add an `overrideGo` attribute to a function result that works 46 | # similar to `overrideAttrs`, but is used specifically for the 47 | # arguments passed to Go builders. 48 | makeOverridable = f: orig: (f orig) // { 49 | overrideGo = new: makeOverridable f (orig // (new orig)); 50 | }; 51 | 52 | buildStdlib = go: runCommand "go-stdlib-${go.version}" 53 | { 54 | nativeBuildInputs = [ go ]; 55 | } '' 56 | HOME=$NIX_BUILD_TOP/home 57 | mkdir $HOME 58 | 59 | goroot="$(go env GOROOT)" 60 | cp -R "$goroot/src" "$goroot/pkg" . 61 | 62 | chmod -R +w . 63 | GODEBUG=installgoroot=all GOROOT=$NIX_BUILD_TOP go install -v --trimpath std 64 | 65 | mkdir $out 66 | cp -r pkg/*_*/* $out 67 | 68 | find $out -name '*.a' | while read -r ARCHIVE_FULL; do 69 | ARCHIVE="''${ARCHIVE_FULL#"$out/"}" 70 | PACKAGE="''${ARCHIVE%.a}" 71 | echo "packagefile $PACKAGE=$ARCHIVE_FULL" 72 | done > $out/importcfg 73 | ''; 74 | 75 | importcfgCmd = { name, deps, out ? "importcfg" }: '' 76 | echo "# nix buildGo ${name}" > "${out}" 77 | cat "${goStdlib}/importcfg" >> "${out}" 78 | ${lib.concatStringsSep "\n" (map (dep: '' 79 | find "${dep}" -name '*.a' | while read -r pkgp; do 80 | relpath="''${pkgp#"${dep}/"}" 81 | pkgname="''${relpath%.a}" 82 | echo "packagefile $pkgname=$pkgp" 83 | done >> "${out}" 84 | '') deps)} 85 | ''; 86 | 87 | # High-level build functions 88 | 89 | # Build a Go program out of the specified files and dependencies. 90 | program = { name, srcs, deps ? [ ], x_defs ? { } }: 91 | let uniqueDeps = allDeps (map (d: d.gopkg) deps); 92 | in runCommand name { } '' 93 | ${importcfgCmd { inherit name; deps = uniqueDeps; }} 94 | ${go}/bin/go tool compile -o ${name}.a -importcfg=importcfg -trimpath=$PWD -trimpath=${go} -p main ${includeSources uniqueDeps} ${spaceOut srcs} 95 | mkdir -p $out/bin 96 | export GOROOT_FINAL=go 97 | ${go}/bin/go tool link -o $out/bin/${name} -importcfg=importcfg -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a 98 | ''; 99 | 100 | # Build a Go library assembled out of the specified files. 101 | # 102 | # This outputs both the sources and compiled binary, as both are 103 | # needed when downstream packages depend on it. 104 | package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }: 105 | let 106 | uniqueDeps = allDeps (map (d: d.gopkg) deps); 107 | 108 | # The build steps below need to be executed conditionally for Go 109 | # assembly if the analyser detected any *.s files. 
110 | # 111 | # This is required for several popular packages (e.g. x/sys). 112 | ifAsm = do: lib.optionalString (sfiles != [ ]) do; 113 | asmBuild = ifAsm '' 114 | ${go}/bin/go tool asm -p ${path} -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles} 115 | ${go}/bin/go tool asm -p ${path} -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles} 116 | ''; 117 | asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h"; 118 | asmPack = ifAsm '' 119 | ${go}/bin/go tool pack r $out/${path}.a ./asm.o 120 | ''; 121 | 122 | gopkg = (runCommand "golib-${name}" { } '' 123 | mkdir -p $out/${path} 124 | ${srcList path (map (s: "${s}") srcs)} 125 | ${asmBuild} 126 | ${importcfgCmd { inherit name; deps = uniqueDeps; }} 127 | ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -importcfg=importcfg -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs} 128 | ${asmPack} 129 | '').overrideAttrs (_: { 130 | passthru = { 131 | inherit gopkg; 132 | goDeps = uniqueDeps; 133 | goImportPath = path; 134 | }; 135 | }); 136 | in 137 | gopkg; 138 | 139 | # Build a tree of Go libraries out of an external Go source 140 | # directory that follows the standard Go layout and was not built 141 | # with buildGo.nix. 142 | # 143 | # The derivation for each actual package will reside in an attribute 144 | # named "gopkg", and an attribute named "gobin" for binaries. 145 | external = import ./external { inherit pkgs program package; }; 146 | 147 | in 148 | { 149 | # Only the high-level builder functions are exposed, but made 150 | # overrideable. 151 | program = makeOverridable program; 152 | package = makeOverridable package; 153 | external = makeOverridable external; 154 | 155 | # re-expose the Go version used 156 | inherit go; 157 | } 158 | -------------------------------------------------------------------------------- /buildGo/example/default.nix: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | # This file provides examples for how to use the various builder 5 | # functions provided by `buildGo`. 6 | # 7 | # The features used in the example are not exhaustive, but should give 8 | # users a quick introduction to how to use buildGo. 9 | 10 | let 11 | buildGo = import ../default.nix { }; 12 | 13 | # Example use of buildGo.package, which creates an importable Go 14 | # package from the specified source files. 15 | examplePackage = buildGo.package { 16 | name = "example"; 17 | srcs = [ 18 | ./lib.go 19 | ]; 20 | }; 21 | 22 | # Example use of buildGo.program, which builds an executable using 23 | # the specified name and dependencies (which in turn must have been 24 | # created via buildGo.package etc.) 25 | in 26 | buildGo.program { 27 | name = "example"; 28 | 29 | srcs = [ 30 | ./main.go 31 | ]; 32 | 33 | deps = [ 34 | examplePackage 35 | ]; 36 | 37 | x_defs = { 38 | "main.Flag" = "successfully"; 39 | }; 40 | } 41 | -------------------------------------------------------------------------------- /buildGo/example/lib.go: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package example 5 | 6 | // UUID returns a totally random, carefully chosen UUID 7 | func UUID() string { 8 | return "3640932f-ad40-4bc9-b45d-f504a0f5910a" 9 | } 10 | -------------------------------------------------------------------------------- /buildGo/example/main.go: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC. 2 | // SPDX-License-Identifier: Apache-2.0 3 | // 4 | // Package main provides a tiny example program for the Bazel-style 5 | // Nix build system for Go. 6 | 7 | package main 8 | 9 | import ( 10 | "example" 11 | "exampleproto" 12 | "fmt" 13 | ) 14 | 15 | var Flag string = "unsuccessfully" 16 | 17 | func main() { 18 | thing := exampleproto.Thing{ 19 | Id: example.UUID(), 20 | KindOfThing: "test thing", 21 | } 22 | 23 | fmt.Printf("The thing is a %s with ID %q\n", thing.Id, thing.KindOfThing) 24 | fmt.Printf("The flag has been %s set\n", Flag) 25 | } 26 | -------------------------------------------------------------------------------- /buildGo/external/default.nix: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC. 2 | # SPDX-License-Identifier: Apache-2.0 3 | { pkgs, program, package }: 4 | 5 | let 6 | inherit (builtins) 7 | elemAt 8 | foldl' 9 | fromJSON 10 | head 11 | length 12 | listToAttrs 13 | readFile 14 | replaceStrings 15 | tail 16 | unsafeDiscardStringContext 17 | throw; 18 | 19 | inherit (pkgs) lib runCommand go jq ripgrep; 20 | 21 | pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p); 22 | 23 | # Collect all non-vendored dependencies from the Go standard library 24 | # into a file that can be used to filter them out when processing 25 | # dependencies. 26 | stdlibPackages = runCommand "stdlib-pkgs.json" { } '' 27 | export HOME=$PWD 28 | export GOPATH=/dev/null 29 | ${go}/bin/go list std | \ 30 | ${ripgrep}/bin/rg -v 'vendor' | \ 31 | ${jq}/bin/jq -R '.' | \ 32 | ${jq}/bin/jq -c -s 'map({key: ., value: true}) | from_entries' \ 33 | > $out 34 | ''; 35 | 36 | analyser = program { 37 | name = "analyser"; 38 | 39 | srcs = [ 40 | ./main.go 41 | ]; 42 | 43 | x_defs = { 44 | "main.stdlibList" = "${stdlibPackages}"; 45 | }; 46 | }; 47 | 48 | mkset = path: value: 49 | if path == [ ] then { gopkg = value; } 50 | else { "${head path}" = mkset (tail path) value; }; 51 | 52 | last = l: elemAt l ((length l) - 1); 53 | 54 | toPackage = self: src: path: depMap: entry: 55 | let 56 | localDeps = map 57 | (d: lib.attrByPath (d ++ [ "gopkg" ]) 58 | ( 59 | throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'" 60 | ) 61 | self) 62 | entry.localDeps; 63 | 64 | foreignDeps = map 65 | (d: lib.attrByPath [ d.path ] 66 | ( 67 | throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'" 68 | ) 69 | depMap) 70 | entry.foreignDeps; 71 | 72 | args = { 73 | srcs = map (f: src + ("/" + f)) entry.files; 74 | deps = localDeps ++ foreignDeps; 75 | }; 76 | 77 | libArgs = args // { 78 | name = pathToName entry.name; 79 | path = lib.concatStringsSep "/" ([ path ] ++ entry.locator); 80 | sfiles = map (f: src + ("/" + f)) entry.sfiles; 81 | }; 82 | 83 | binArgs = args // { 84 | name = (last ((lib.splitString "/" path) ++ entry.locator)); 85 | }; 86 | in 87 | if entry.isCommand then (program binArgs) else (package libArgs); 88 | 89 | in 90 | { src, path, deps ? 
[ ] }: 91 | let 92 | # Build a map of dependencies (from their import paths to their 93 | # derivation) so that they can be conditionally imported only in 94 | # sub-packages that require them. 95 | depMap = listToAttrs (map 96 | (d: { 97 | name = d.goImportPath; 98 | value = d; 99 | }) 100 | (map (d: d.gopkg) deps)); 101 | 102 | name = pathToName path; 103 | analysisOutput = runCommand "${name}-structure.json" { } '' 104 | ${analyser}/bin/analyser -path ${path} -source ${src} > $out 105 | ''; 106 | # readFile adds the references of the read in file to the string context for 107 | # Nix >= 2.6 which would break the attribute set construction in fromJSON 108 | analysis = fromJSON (unsafeDiscardStringContext (readFile analysisOutput)); 109 | in 110 | lib.fix (self: foldl' lib.recursiveUpdate { } ( 111 | map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis 112 | )) 113 | -------------------------------------------------------------------------------- /buildGo/external/main.go: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | // This tool analyses external (i.e. not built with `buildGo.nix`) Go 5 | // packages to determine a build plan that Nix can import. 6 | package main 7 | 8 | import ( 9 | "encoding/json" 10 | "flag" 11 | "fmt" 12 | "go/build" 13 | "log" 14 | "os" 15 | "path" 16 | "path/filepath" 17 | "strings" 18 | ) 19 | 20 | // Path to a JSON file describing all standard library import paths. 21 | // This file is generated and set here by Nix during the build 22 | // process. 23 | var stdlibList string 24 | 25 | // pkg describes a single Go package within the specified source 26 | // directory. 27 | // 28 | // Return information includes the local (relative from project root) 29 | // and external (none-stdlib) dependencies of this package. 30 | type pkg struct { 31 | Name string `json:"name"` 32 | Locator []string `json:"locator"` 33 | Files []string `json:"files"` 34 | SFiles []string `json:"sfiles"` 35 | LocalDeps [][]string `json:"localDeps"` 36 | ForeignDeps []foreignDep `json:"foreignDeps"` 37 | IsCommand bool `json:"isCommand"` 38 | } 39 | 40 | type foreignDep struct { 41 | Path string `json:"path"` 42 | // filename, column and line number of the import, if known 43 | Position string `json:"position"` 44 | } 45 | 46 | // findGoDirs returns a filepath.WalkFunc that identifies all 47 | // directories that contain Go source code in a certain tree. 48 | func findGoDirs(at string) ([]string, error) { 49 | dirSet := make(map[string]bool) 50 | 51 | err := filepath.Walk(at, func(path string, info os.FileInfo, err error) error { 52 | if err != nil { 53 | return err 54 | } 55 | 56 | name := info.Name() 57 | // Skip folders that are guaranteed to not be relevant 58 | if info.IsDir() && (name == "testdata" || name == ".git") { 59 | return filepath.SkipDir 60 | } 61 | 62 | // If the current file is a Go file, then the directory is popped 63 | // (i.e. marked as a Go directory). 
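		//
		// For example, a directory containing only "foo_test.go" is not
		// marked, while one containing "foo.go" is (illustrative file
		// names).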
64 | if !info.IsDir() && strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") { 65 | dirSet[filepath.Dir(path)] = true 66 | } 67 | 68 | return nil 69 | }) 70 | 71 | if err != nil { 72 | return nil, err 73 | } 74 | 75 | goDirs := []string{} 76 | for goDir := range dirSet { 77 | goDirs = append(goDirs, goDir) 78 | } 79 | 80 | return goDirs, nil 81 | } 82 | 83 | // analysePackage loads and analyses the imports of a single Go 84 | // package, returning the data that is required by the Nix code to 85 | // generate a derivation for this package. 86 | func analysePackage(root, source, importpath string, stdlib map[string]bool) (pkg, error) { 87 | ctx := build.Default 88 | ctx.CgoEnabled = false 89 | 90 | p, err := ctx.ImportDir(source, build.IgnoreVendor) 91 | if err != nil { 92 | return pkg{}, err 93 | } 94 | 95 | local := [][]string{} 96 | foreign := []foreignDep{} 97 | 98 | for _, i := range p.Imports { 99 | if stdlib[i] { 100 | continue 101 | } 102 | 103 | if i == importpath { 104 | local = append(local, []string{}) 105 | } else if strings.HasPrefix(i, importpath+"/") { 106 | local = append(local, strings.Split(strings.TrimPrefix(i, importpath+"/"), "/")) 107 | } else { 108 | // The import positions is a map keyed on the import name. 109 | // The value is a list, presumably because an import can appear 110 | // multiple times in a package. Let’s just take the first one, 111 | // should be enough for a good error message. 112 | firstPos := p.ImportPos[i][0].String() 113 | foreign = append(foreign, foreignDep{Path: i, Position: firstPos}) 114 | } 115 | } 116 | 117 | prefix := strings.TrimPrefix(source, root+"/") 118 | 119 | locator := []string{} 120 | if len(prefix) != len(source) { 121 | locator = strings.Split(prefix, "/") 122 | } else { 123 | // Otherwise, the locator is empty since its the root package and 124 | // no prefix should be added to files. 125 | prefix = "" 126 | } 127 | 128 | files := []string{} 129 | for _, f := range p.GoFiles { 130 | files = append(files, path.Join(prefix, f)) 131 | } 132 | 133 | sfiles := []string{} 134 | for _, f := range p.SFiles { 135 | sfiles = append(sfiles, path.Join(prefix, f)) 136 | } 137 | 138 | return pkg{ 139 | Name: path.Join(importpath, prefix), 140 | Locator: locator, 141 | Files: files, 142 | SFiles: sfiles, 143 | LocalDeps: local, 144 | ForeignDeps: foreign, 145 | IsCommand: p.IsCommand(), 146 | }, nil 147 | } 148 | 149 | func loadStdlibPkgs(from string) (pkgs map[string]bool, err error) { 150 | f, err := os.ReadFile(from) 151 | if err != nil { 152 | return 153 | } 154 | 155 | err = json.Unmarshal(f, &pkgs) 156 | return 157 | } 158 | 159 | func main() { 160 | source := flag.String("source", "", "path to directory with sources to process") 161 | path := flag.String("path", "", "import path for the package") 162 | 163 | flag.Parse() 164 | 165 | if *source == "" { 166 | log.Fatalf("-source flag must be specified") 167 | } 168 | 169 | stdlibPkgs, err := loadStdlibPkgs(stdlibList) 170 | if err != nil { 171 | log.Fatalf("failed to load standard library index from %q: %s\n", stdlibList, err) 172 | } 173 | 174 | goDirs, err := findGoDirs(*source) 175 | if err != nil { 176 | log.Fatalf("failed to walk source directory '%s': %s", *source, err) 177 | } 178 | 179 | all := []pkg{} 180 | for _, d := range goDirs { 181 | analysed, err := analysePackage(*source, d, *path, stdlibPkgs) 182 | 183 | // If the Go source analysis returned "no buildable Go files", 184 | // that directory should be skipped. 
185 | // 186 | // This might be due to `+build` flags on the platform and other 187 | // reasons (such as test files). 188 | if _, ok := err.(*build.NoGoError); ok { 189 | continue 190 | } 191 | 192 | if err != nil { 193 | log.Fatalf("failed to analyse package at %q: %s", d, err) 194 | } 195 | all = append(all, analysed) 196 | } 197 | 198 | j, _ := json.Marshal(all) 199 | fmt.Println(string(j)) 200 | } 201 | -------------------------------------------------------------------------------- /buildkite.yml: -------------------------------------------------------------------------------- 1 | # Build pipeline for the filtered //views/kit workspace of depot. This 2 | # pipeline is triggered by each build of canon. 3 | # 4 | # Pipeline status is visible on https://buildkite.com/tvl/tvl-kit 5 | 6 | steps: 7 | - command: "nix-build --no-out-link -A besadii" 8 | label: ":nix: besadii" 9 | 10 | - command: "nix-build --no-out-link -A magrathea" 11 | label: ":nix: magrathea" 12 | 13 | - label: ":nix: lazy-deps" 14 | command: | 15 | nix-build -E 'with import ./. {}; lazy-deps { mg.attr = "magrathea"; }' 16 | result/bin/mg 17 | -------------------------------------------------------------------------------- /buildkite/default.nix: -------------------------------------------------------------------------------- 1 | # Logic for generating Buildkite pipelines from Nix build targets read 2 | # by //nix/readTree. 3 | # 4 | # It outputs a "YAML" (actually JSON) file which is evaluated and 5 | # submitted to Buildkite at the start of each build. 6 | # 7 | # The structure of the file that is being created is documented here: 8 | # https://buildkite.com/docs/pipelines/defining-steps 9 | { depot, pkgs, ... }: 10 | 11 | let 12 | inherit (builtins) 13 | attrValues 14 | concatLists 15 | concatStringsSep 16 | elem 17 | foldl' 18 | hasAttr 19 | hashString 20 | isString 21 | length 22 | listToAttrs 23 | mapAttrs 24 | toJSON 25 | unsafeDiscardStringContext; 26 | 27 | inherit (pkgs) lib runCommand writeText; 28 | inherit (depot.nix.readTree) mkLabel; 29 | 30 | inherit (depot.nix) dependency-analyzer; 31 | in 32 | rec { 33 | # Create a unique key for the buildkite pipeline based on the given derivation 34 | # or drvPath. A consequence of using such keys is that every derivation may 35 | # only be exposed as a single, unique step in the pipeline. 36 | keyForDrv = drvOrPath: 37 | let 38 | drvPath = 39 | if lib.isDerivation drvOrPath then drvOrPath.drvPath 40 | else if lib.isString drvOrPath then drvOrPath 41 | else builtins.throw "keyForDrv: expected string or derivation"; 42 | 43 | # Only use the drv hash to prevent escaping problems. Buildkite also has a 44 | # limit of 100 characters on keys. 45 | in 46 | "drv-" + (builtins.substring 0 32 47 | (builtins.baseNameOf (unsafeDiscardStringContext drvPath)) 48 | ); 49 | 50 | # Given an arbitrary attribute path generate a Nix expression which obtains 51 | # this from the root of depot (assumed to be ./.). Attributes may be any 52 | # Nix strings suitable as attribute names, not just Nix literal-safe strings. 53 | mkBuildExpr = attrPath: 54 | let 55 | descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})"; 56 | in 57 | foldl' descend "import ./. {}" attrPath; 58 | 59 | # Determine whether to skip a target if it has not diverged from the 60 | # HEAD branch. 61 | shouldSkip = { parentTargetMap ? { }, label, drvPath }: 62 | if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath 63 | then "Target has not changed." 
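    # Note: Buildkite accepts either a boolean or a string for `skip`;
    # a string both skips the step and is shown as the skip reason in
    # the UI, which is why a reason is returned here instead of `true`.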
64 | else false; 65 | 66 | # Create build command for an attribute path pointing to a derivation. 67 | mkBuildCommand = { attrPath, drvPath, outLink ? "result" }: concatStringsSep " " [ 68 | # If the nix build fails, the Nix command's exit status should be used. 69 | "set -o pipefail;" 70 | 71 | # First try to realise the drvPath of the target so we don't evaluate twice. 72 | # Nix has no concept of depending on a derivation file without depending on 73 | # at least one of its `outPath`s, so we need to discard the string context 74 | # if we don't want to build everything during pipeline construction. 75 | # 76 | # To make this more uniform with how nix-build(1) works, we call realpath(1) 77 | # on nix-store(1)'s output since it has the habit of printing the path of the 78 | # out link, not the store path. 79 | "(nix-store --realise '${drvPath}' --add-root '${outLink}' --indirect | xargs -r realpath)" 80 | 81 | # Since we don't gcroot the derivation files, they may be deleted by the 82 | # garbage collector. In that case we can reevaluate and build the attribute 83 | # using nix-build. 84 | "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr attrPath}' --show-trace --out-link '${outLink}')" 85 | ]; 86 | 87 | # Attribute path of a target relative to the depot root. Needs to take into 88 | # account whether the target is a physical target (which corresponds to a path 89 | # in the filesystem) or the subtarget of a physical target. 90 | targetAttrPath = target: 91 | target.__readTree 92 | ++ lib.optionals (target ? __subtarget) [ target.__subtarget ]; 93 | 94 | # Given a derivation (identified by drvPath) that is part of the list of 95 | # targets passed to mkPipeline, determine all derivations that it depends on 96 | # and are also part of the pipeline. Finally, return the keys of the steps 97 | # that build them. This is used to populate `depends_on` in `mkStep`. 98 | # 99 | # See //nix/dependency-analyzer for documentation on the structure of `targetDepMap`. 100 | getTargetPipelineDeps = targetDepMap: drvPath: 101 | builtins.map keyForDrv (targetDepMap.${drvPath}.knownDeps or [ ]); 102 | 103 | # Create a pipeline step from a single target. 104 | mkStep = { headBranch, parentTargetMap, targetDepMap, target, cancelOnBuildFailing }: 105 | let 106 | label = mkLabel target; 107 | drvPath = unsafeDiscardStringContext target.drvPath; 108 | in 109 | { 110 | label = ":nix: " + label; 111 | key = keyForDrv target; 112 | skip = shouldSkip { inherit label drvPath parentTargetMap; }; 113 | command = mkBuildCommand { 114 | attrPath = targetAttrPath target; 115 | inherit drvPath; 116 | }; 117 | env.READTREE_TARGET = label; 118 | cancel_on_build_failing = cancelOnBuildFailing; 119 | 120 | # Add a dependency on the initial static pipeline step which 121 | # always runs. This allows build steps uploaded in batches to 122 | # start running before all batches have been uploaded. 123 | depends_on = [ ":init:" ] 124 | ++ getTargetPipelineDeps targetDepMap drvPath 125 | ++ lib.optionals (target ? meta.ci.buildkiteExtraDeps) target.meta.ci.buildkiteExtraDeps; 126 | } // lib.optionalAttrs (target ? meta.timeout) { 127 | timeout_in_minutes = target.meta.timeout / 60; 128 | # Additional arguments to set on the step. 129 | # Keep in mind these *overwrite* existing step args, not extend. Use with caution. 130 | } // lib.optionalAttrs (target ? 
meta.ci.buildkiteExtraStepArgs) target.meta.ci.buildkiteExtraStepArgs; 131 | 132 | # Helper function to inelegantly divide a list into chunks of at 133 | # most n elements. 134 | # 135 | # This works by assigning each element a chunk ID based on its 136 | # index, and then grouping all elements by their chunk ID. 137 | chunksOf = n: list: 138 | let 139 | chunkId = idx: toString (idx / n + 1); 140 | assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list; 141 | unchunk = mapAttrs (_: elements: map (e: e.value) elements); 142 | in 143 | unchunk (lib.groupBy (e: e.chunk) assigned); 144 | 145 | # Define a build pipeline chunk as a JSON file, using the pipeline 146 | # format documented on 147 | # https://buildkite.com/docs/pipelines/defining-steps. 148 | makePipelineChunk = name: chunkId: chunk: rec { 149 | filename = "${name}-chunk-${chunkId}.json"; 150 | path = writeText filename (toJSON { 151 | steps = chunk; 152 | }); 153 | }; 154 | 155 | # Split the pipeline into chunks of at most 192 steps at once, which 156 | # are uploaded sequentially. This is because of a limitation in the 157 | # Buildkite backend which struggles to process more than a specific 158 | # number of chunks at once. 159 | pipelineChunks = name: steps: 160 | attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps)); 161 | 162 | # Create a pipeline structure for the given targets. 163 | mkPipeline = 164 | { 165 | # HEAD branch of the repository on which release steps, GC 166 | # anchoring and other "mainline only" steps should run. 167 | headBranch 168 | , # List of derivations as read by readTree (in most cases just the 169 | # output of readTree.gather) that should be built in Buildkite. 170 | # 171 | # These are scheduled as the first build steps and run as fast as 172 | # possible, in order, without any concurrency restrictions. 173 | drvTargets 174 | , # Derivation map of a parent commit. Only targets which no longer 175 | # correspond to the content of this map will be built. Passing an 176 | # empty map will always build all targets. 177 | parentTargetMap ? { } 178 | , # A list of plain Buildkite step structures to run alongside the 179 | # build for all drvTargets, but before proceeding with any 180 | # post-build actions such as status reporting. 181 | # 182 | # Can be used for things like code formatting checks. 183 | additionalSteps ? [ ] 184 | , # A list of plain Buildkite step structures to run after all 185 | # previous steps succeeded. 186 | # 187 | # Can be used for status reporting steps and the like. 188 | postBuildSteps ? [ ] 189 | # The list of phases known by the current Buildkite 190 | # pipeline. Dynamic pipeline chunks for each phase are uploaded 191 | # to Buildkite on execution of static part of the 192 | # pipeline. Phases selection is hard-coded in the static 193 | # pipeline. 194 | # 195 | # Pipeline generation will fail when an extra step with 196 | # unregistered phase is added. 197 | # 198 | # Common scenarios for different phase: 199 | # - "build" - main phase for building all Nix targets 200 | # - "release" - pushing artifacts to external repositories 201 | # - "deploy" - updating external deployment configurations 202 | , phases ? [ "build" "release" ] 203 | # Build phases that are active for this invocation (i.e. their 204 | # steps should be generated). 205 | # 206 | # This can be used to disable outputting parts of a pipeline if, 207 | # for example, build and release phases are created in separate 208 | # eval contexts. 
209 | # 210 | # TODO(tazjin): Fail/warn if unknown phase is requested. 211 | , activePhases ? phases 212 | # Setting this attribute to true cancels dynamic pipeline steps 213 | # as soon as the build is marked as failing. 214 | # 215 | # To enable this feature one should enable "Fail Fast" setting 216 | # at Buildkite pipeline or on organization level. 217 | , cancelOnBuildFailing ? false 218 | }: 219 | let 220 | # List of phases to include. 221 | enabledPhases = lib.intersectLists activePhases phases; 222 | 223 | # Is the 'build' phase included? This phase is treated specially 224 | # because it always contains the plain Nix builds, and some 225 | # logic/optimisation depends on knowing whether is executing. 226 | buildEnabled = elem "build" enabledPhases; 227 | 228 | # Dependency relations between the `drvTargets`. See also //nix/dependency-analyzer. 229 | targetDepMap = 230 | let 231 | # Only calculate dependencies between drvTargets that were not part of 232 | # the previous pipeline (per parentTargetMap). Unchanged targets will 233 | # be skipped (assumed already built), so it's useless to emit deps 234 | # on their steps. 235 | changedDrvTargets = builtins.filter 236 | (target: 237 | parentTargetMap.${mkLabel target}.drvPath or null != target.drvPath 238 | ) 239 | drvTargets; 240 | in 241 | dependency-analyzer (dependency-analyzer.drvsToPaths changedDrvTargets); 242 | 243 | # Convert a target into all of its steps, separated by build 244 | # phase (as phases end up in different chunks). 245 | targetToSteps = target: 246 | let 247 | mkStepArgs = { 248 | inherit headBranch parentTargetMap targetDepMap target cancelOnBuildFailing; 249 | }; 250 | step = mkStep mkStepArgs; 251 | 252 | # Same step, but with an override function applied. This is 253 | # used in mkExtraStep if the extra step needs to modify the 254 | # parent derivation somehow. 255 | # 256 | # Note that this will never affect the label. 257 | overridable = f: mkStep (mkStepArgs // { target = (f target); }); 258 | 259 | # Split extra steps by phase. 260 | splitExtraSteps = lib.groupBy ({ phase, ... }: phase) 261 | (attrValues (mapAttrs (normaliseExtraStep phases overridable) 262 | (target.meta.ci.extraSteps or { }))); 263 | 264 | extraSteps = mapAttrs 265 | (_: steps: 266 | map (mkExtraStep (targetAttrPath target) buildEnabled) steps) 267 | splitExtraSteps; 268 | in 269 | if !buildEnabled then extraSteps 270 | else extraSteps // { 271 | build = [ step ] ++ (extraSteps.build or [ ]); 272 | }; 273 | 274 | # Combine all target steps into step lists per phase. 275 | # 276 | # TODO(tazjin): Refactor when configurable phases show up. 277 | globalSteps = { 278 | build = additionalSteps; 279 | release = postBuildSteps; 280 | }; 281 | 282 | phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists) 283 | ((map targetToSteps drvTargets) ++ [ globalSteps ]); 284 | 285 | # Generate pipeline chunks for each phase. 
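      # As an illustration: a "build" phase with 400 steps would be
      # uploaded as build-chunk-1.json, build-chunk-2.json and
      # build-chunk-3.json (each holding at most 192 steps), while
      # phases that end up with no steps produce no chunk files at all.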
286 | chunks = foldl' 287 | (acc: phase: 288 | let phaseSteps = phasesWithSteps.${phase} or [ ]; in 289 | if phaseSteps == [ ] 290 | then acc 291 | else acc ++ (pipelineChunks phase phaseSteps)) 292 | [ ] 293 | enabledPhases; 294 | 295 | in 296 | runCommand "buildkite-pipeline" { } '' 297 | mkdir $out 298 | echo "Generated ${toString (length chunks)} pipeline chunks" 299 | ${ 300 | lib.concatMapStringsSep "\n" 301 | (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks 302 | } 303 | ''; 304 | 305 | # Create a drvmap structure for the given targets, containing the 306 | # mapping of all target paths to their derivations. The mapping can 307 | # be persisted for future use. 308 | mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map 309 | (target: { 310 | name = mkLabel target; 311 | value = { 312 | drvPath = unsafeDiscardStringContext target.drvPath; 313 | 314 | # Include the attrPath in the output to reconstruct the drv 315 | # without parsing the human-readable label. 316 | attrPath = targetAttrPath target; 317 | }; 318 | }) 319 | drvTargets))); 320 | 321 | # Implementation of extra step logic. 322 | # 323 | # Each target extra step is an attribute specified in 324 | # `meta.ci.extraSteps`. Its attribute name will be used as the step 325 | # name on Buildkite. 326 | # 327 | # command (required): A command that will be run in the depot 328 | # checkout when this step is executed. Should be a derivation 329 | # resulting in a single executable file, e.g. through 330 | # pkgs.writeShellScript. 331 | # 332 | # label (optional): Human-readable label for this step to display 333 | # in the Buildkite UI instead of the attribute name. 334 | # 335 | # prompt (optional): Setting this blocks the step until confirmed 336 | # by a human. Should be a string which is displayed for 337 | # confirmation. These steps always run after the main build is 338 | # done and have no influence on CI status. 339 | # 340 | # needsOutput (optional): If set to true, the parent derivation 341 | # will be built in the working directory before running the 342 | # command. Output will be available as 'result'. 343 | # TODO: Figure out multiple-output derivations. 344 | # 345 | # parentOverride (optional): A function (drv -> drv) to override 346 | # the parent's target definition when preparing its output. Only 347 | # used in extra steps that use needsOutput. 348 | # 349 | # branches (optional): Git references (branches, tags ... ) on 350 | # which this step should be allowed to run. List of strings. 351 | # 352 | # alwaysRun (optional): If set to true, this step will always run, 353 | # even if its parent has not been rebuilt. 354 | # 355 | # Note that gated steps are independent of each other. 356 | 357 | # Create a gated step in a step group, independent from any other 358 | # steps. 359 | mkGatedStep = { step, label, parent, prompt }: { 360 | inherit (step) depends_on; 361 | group = label; 362 | skip = parent.skip or false; 363 | 364 | steps = [ 365 | { 366 | inherit prompt; 367 | branches = step.branches or [ ]; 368 | block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})"; 369 | } 370 | 371 | # The explicit depends_on of the wrapped step must be removed, 372 | # otherwise its dependency relationship with the gate step will 373 | # break. 
374 | (builtins.removeAttrs step [ "depends_on" ]) 375 | ]; 376 | }; 377 | 378 | # Validate and normalise extra step configuration before actually 379 | # generating build steps, in order to use user-provided metadata 380 | # during the pipeline generation. 381 | normaliseExtraStep = phases: overridableParent: key: 382 | { command 383 | , label ? key 384 | , needsOutput ? false 385 | , parentOverride ? (x: x) 386 | , branches ? null 387 | , alwaysRun ? false 388 | , prompt ? false 389 | , softFail ? false 390 | , phase ? "build" 391 | , skip ? false 392 | , agents ? null 393 | }: 394 | let 395 | parent = overridableParent parentOverride; 396 | parentLabel = parent.env.READTREE_TARGET; 397 | 398 | validPhase = lib.throwIfNot (elem phase phases) '' 399 | In step '${label}' (from ${parentLabel}): 400 | 401 | Phase '${phase}' is not valid. 402 | 403 | Known phases: ${concatStringsSep ", " phases} 404 | '' 405 | phase; 406 | in 407 | { 408 | inherit 409 | alwaysRun 410 | branches 411 | command 412 | key 413 | label 414 | needsOutput 415 | parent 416 | parentLabel 417 | softFail 418 | skip 419 | agents; 420 | 421 | phase = validPhase; 422 | 423 | prompt = lib.throwIf (prompt != false && phase == "build") '' 424 | In step '${label}' (from ${parentLabel}): 425 | 426 | The 'prompt' feature can not be used by steps in the "build" 427 | phase, because CI builds should not be gated on manual human 428 | approvals. 429 | '' 430 | prompt; 431 | }; 432 | 433 | # Create the Buildkite configuration for an extra step, optionally 434 | # wrapping it in a gate group. 435 | mkExtraStep = parentAttrPath: buildEnabled: cfg: 436 | let 437 | # ATTN: needs to match an entry in .gitignore so that the tree won't get dirty 438 | commandScriptLink = "nix-buildkite-extra-step-command-script"; 439 | 440 | step = { 441 | key = "extra-step-" + hashString "sha1" "${cfg.label}-${cfg.parentLabel}"; 442 | label = ":gear: ${cfg.label} (from ${cfg.parentLabel})"; 443 | skip = 444 | let 445 | # When parent doesn't have skip attribute set, default to false 446 | parentSkip = cfg.parent.skip or false; 447 | # Extra step skip parameter can be string explaining the 448 | # skip reason. 449 | extraStepSkip = if builtins.isString cfg.skip then true else cfg.skip; 450 | # Don't run if extra step is explicitly set to skip. If 451 | # parameter is not set or equal to false, follow parent behavior. 
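# In other words (behaviour sketch, not original source):
#
#   cfg.alwaysRun = true          -> never skipped
#   cfg.skip = true or "<reason>" -> skipped; a string is passed on to
#                                    Buildkite as the skip reason
#   cfg.skip = false (default)    -> follows the parent step's skip value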
452 | skip' = if extraStepSkip then cfg.skip else parentSkip; 453 | in 454 | if cfg.alwaysRun then false else skip'; 455 | 456 | depends_on = lib.optional 457 | (buildEnabled && !cfg.alwaysRun && !cfg.needsOutput) 458 | cfg.parent.key; 459 | 460 | command = '' 461 | set -ueo pipefail 462 | ${lib.optionalString cfg.needsOutput 463 | "echo '~~~ Preparing build output of ${cfg.parentLabel}'" 464 | } 465 | ${lib.optionalString cfg.needsOutput cfg.parent.command} 466 | echo '--- Building extra step script' 467 | command_script="$(${ 468 | # Using command substitution in this way assumes the script drv only has one output 469 | assert builtins.length cfg.command.outputs == 1; 470 | mkBuildCommand { 471 | # script is exposed at .meta.ci.extraSteps..command 472 | attrPath = 473 | parentAttrPath 474 | ++ [ "meta" "ci" "extraSteps" cfg.key "command" ]; 475 | drvPath = unsafeDiscardStringContext cfg.command.drvPath; 476 | # make sure it doesn't conflict with result (from needsOutput) 477 | outLink = commandScriptLink; 478 | } 479 | })" 480 | echo '+++ Running extra step script' 481 | # ATTN: buildkite substitutes this variable outside of the execution for some reason 482 | exec "\$command_script" 483 | ''; 484 | 485 | soft_fail = cfg.softFail; 486 | } // (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; }) 487 | // (lib.optionalAttrs (cfg.branches != null) { 488 | branches = lib.concatStringsSep " " cfg.branches; 489 | }); 490 | in 491 | if (isString cfg.prompt) 492 | then 493 | mkGatedStep 494 | { 495 | inherit step; 496 | inherit (cfg) label parent prompt; 497 | } 498 | else step; 499 | } 500 | -------------------------------------------------------------------------------- /buildkite/fetch-parent-targets.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -ueo pipefail 3 | 4 | # Each Buildkite build stores the derivation target map as a pipeline 5 | # artifact. To reduce the amount of work done by CI, each CI build is 6 | # diffed against the latest such derivation map found for the 7 | # repository. 8 | # 9 | # Note that this does not take into account when the currently 10 | # processing CL was forked off from the canonical branch, meaning that 11 | # things like nixpkgs updates in between will cause mass rebuilds in 12 | # any case. 13 | # 14 | # If no map is found, the failure mode is not critical: We simply 15 | # build all targets. 16 | 17 | readonly REPO_ROOT=$(git rev-parse --show-toplevel) 18 | 19 | : ${DRVMAP_PATH:=pipeline/drvmap.json} 20 | : ${BUILDKITE_TOKEN_PATH:=~/buildkite-token} 21 | 22 | # Runs a fairly complex Buildkite GraphQL query that attempts to fetch all 23 | # pipeline-gen steps from the default branch, as long as one appears within the 24 | # last 50 builds or so. The query restricts build states to running or passed 25 | # builds, which means that it *should* be unlikely that nothing is found. 26 | # 27 | # There is no way to filter this more loosely (e.g. by saying "any recent build 28 | # matching these conditions"). 29 | # 30 | # The returned data structure is complex, and disassembled by a JQ script that 31 | # first filters out all builds with no matching jobs (e.g. builds that are still 32 | # in progress), and then filters those down to builds with artifacts, and then 33 | # to drvmap artifacts specifically. 34 | # 35 | # If a recent drvmap was found, this returns its download URL. Otherwise, it 36 | # returns the string "null". 
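#
# Example invocation (sketch, not part of the original script; the
# slugs and token path are placeholders):
#
#   BUILDKITE_ORGANIZATION_SLUG=example-org \
#   BUILDKITE_PIPELINE_SLUG=example-pipeline \
#   BUILDKITE_TOKEN_PATH=~/buildkite-token \
#     ./fetch-parent-targets.sh
#
# On success, the parent derivation map ends up in
# tmp/parent-target-map.json relative to the working directory.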
37 | function latest_drvmap_url { 38 | set -u 39 | curl 'https://graphql.buildkite.com/v1' \ 40 | --silent \ 41 | -H "Authorization: Bearer $(cat ${BUILDKITE_TOKEN_PATH})" \ 42 | -H "Content-Type: application/json" \ 43 | -d "{\"query\": \"{ pipeline(slug: \\\"$BUILDKITE_ORGANIZATION_SLUG/$BUILDKITE_PIPELINE_SLUG\\\") { builds(first: 50, branch: [\\\"%default\\\"], state: [RUNNING, PASSED]) { edges { node { jobs(passed: true, first: 1, type: [COMMAND], step: {key: [\\\"pipeline-gen\\\"]}) { edges { node { ... on JobTypeCommand { url artifacts { edges { node { downloadURL path }}}}}}}}}}}}\"}" | tee out.json | \ 44 | jq -r '[.data.pipeline.builds.edges[] | select((.node.jobs.edges | length) > 0) | .node.jobs.edges[] | .node.artifacts[][] | select(.node.path == "pipeline/drvmap.json")][0].node.downloadURL' 45 | } 46 | 47 | readonly DOWNLOAD_URL=$(latest_drvmap_url) 48 | 49 | if [[ ${DOWNLOAD_URL} != "null" ]]; then 50 | mkdir -p tmp 51 | curl -o tmp/parent-target-map.json ${DOWNLOAD_URL} && echo "downloaded parent derivation map" \ 52 | || echo "failed to download derivation map!" 53 | else 54 | echo "no derivation map found!" 55 | fi 56 | -------------------------------------------------------------------------------- /checks/default.nix: -------------------------------------------------------------------------------- 1 | # Utilities for CI checks that work with the readTree-based CI. 2 | { pkgs, ... }: 3 | 4 | let 5 | inherit (pkgs.lib.strings) sanitizeDerivationName; 6 | in 7 | { 8 | # Utility for verifying Terraform configuration. 9 | # 10 | # Expects to be passed a pre-configured Terraform derivation and a 11 | # source path, and will do a dummy-initialisation and config 12 | # validation inside of that Terraform configuration. 13 | validateTerraform = 14 | { 15 | # Environment name to use (inconsequential, only for drv name) 16 | name ? "main" 17 | , # Terraform package to use. Should be pre-configured with the 18 | # correct providers. 19 | terraform ? pkgs.terraform 20 | , # Source path for Terraform configuration. Be careful about 21 | # relative imports. Use the 'subDir' parameter to optionally cd 22 | # into a subdirectory of source, e.g. if there is a flat structure 23 | # with modules. 24 | src 25 | , # Sub-directory of $src from which to run the check. Useful in 26 | # case of relative Terraform imports from a code tree 27 | subDir ? "." 28 | , # Environment variables to pass to Terraform. Necessary in case of 29 | # dummy environment variables that need to be set. 30 | env ? { } 31 | }: 32 | pkgs.runCommand "tf-validate-${sanitizeDerivationName name}" env '' 33 | cp -r ${src}/* . && chmod -R u+w . 34 | cd ${subDir} 35 | ${terraform}/bin/terraform init -upgrade -backend=false -input=false 36 | ${terraform}/bin/terraform validate | tee $out 37 | ''; 38 | } 39 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | # Externally importable TVL depot stack. This is intended to be called 2 | # with a supplied package set, otherwise the package set currently in 3 | # use by the TVL depot will be used. 4 | # 5 | # For now, readTree is not used inside of this configuration to keep 6 | # it simple. Adding it may be useful if we set up test scaffolding 7 | # around the exported workspace. 8 | 9 | { pkgs ? (import ./nixpkgs { 10 | depotOverlays = false; 11 | depot.third_party.sources = import ./sources { }; 12 | externalArgs = args; 13 | }) 14 | , ... 
15 | }@args: 16 | 17 | pkgs.lib.fix (self: { 18 | besadii = import ./besadii { 19 | depot.nix.buildGo = self.buildGo; 20 | }; 21 | 22 | buildGo = import ./buildGo { inherit pkgs; }; 23 | 24 | buildkite = import ./buildkite { 25 | inherit pkgs; 26 | depot.nix = { 27 | inherit (self) readTree dependency-analyzer; 28 | }; 29 | }; 30 | 31 | checks = import ./checks { inherit pkgs; }; 32 | dependency-analyzer = import ./dependency-analyzer { 33 | inherit pkgs; 34 | inherit (pkgs) lib; 35 | depot.nix.stateMonad = self.stateMonad; 36 | }; 37 | lazy-deps = import ./lazy-deps { 38 | inherit pkgs; 39 | lib = pkgs.lib; 40 | }; 41 | magrathea = import ./magrathea { inherit pkgs; }; 42 | readTree = import ./readTree { }; 43 | stateMonad = import ./stateMonad { }; 44 | }) 45 | -------------------------------------------------------------------------------- /dependency-analyzer/default.nix: -------------------------------------------------------------------------------- 1 | { lib, depot, pkgs, ... }: 2 | 3 | let 4 | inherit (builtins) unsafeDiscardStringContext appendContext; 5 | 6 | # 7 | # Utilities 8 | # 9 | 10 | # Determine all paths a derivation depends on, i.e. input derivations and 11 | # files imported into the Nix store. 12 | # 13 | # Implementation for Nix < 2.6 is quite hacky at the moment. 14 | # 15 | # Type: str -> [str] 16 | # 17 | # TODO(sterni): clean this up and expose it 18 | directDrvDeps = 19 | let 20 | getDeps = 21 | if lib.versionAtLeast builtins.nixVersion "2.6" 22 | then 23 | # Since https://github.com/NixOS/nix/pull/1643, Nix apparently »preserves 24 | # string context« through a readFile invocation. This has the side effect 25 | # that it becomes possible to query the actual references a store path has. 26 | # Not a 100% sure this is intended, but _very_ convenient for us here. 27 | drvPath: 28 | builtins.attrNames (builtins.getContext (builtins.readFile drvPath)) 29 | else 30 | # For Nix < 2.6 we have to rely on HACK, namely grepping for quoted 31 | # store path references in the file. In the future this should be 32 | # replaced by a proper derivation parser. 33 | drvPath: builtins.concatLists ( 34 | builtins.filter builtins.isList ( 35 | builtins.split 36 | "\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\"" 37 | (builtins.readFile drvPath) 38 | ) 39 | ); 40 | in 41 | drvPath: 42 | # if the passed path is not a derivation we can't necessarily get its 43 | # dependencies, since it may not be representable as a Nix string due to 44 | # NUL bytes, e.g. compressed patch files imported into the Nix store. 45 | if builtins.match "^.+\\.drv$" drvPath == null 46 | then [ ] 47 | else getDeps drvPath; 48 | 49 | # Maps a list of derivation to the list of corresponding `drvPath`s. 50 | # 51 | # Type: [drv] -> [str] 52 | drvsToPaths = drvs: 53 | builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs; 54 | 55 | # 56 | # Calculate map of direct derivation dependencies 57 | # 58 | 59 | # Create the dependency map entry for a given `drvPath` which mainly includes 60 | # a list of other `drvPath`s it depends on. Additionally we store whether the 61 | # derivation is `known`, i.e. part of the initial list of derivations we start 62 | # generating the map from 63 | # 64 | # Type: bool -> string -> set 65 | drvEntry = known: drvPath: 66 | let 67 | # key may not refer to a store path, … 68 | key = unsafeDiscardStringContext drvPath; 69 | # but we must read from the .drv file. 
70 | path = builtins.unsafeDiscardOutputDependency drvPath; 71 | in 72 | { 73 | inherit key; 74 | # trick so we can call listToAttrs directly on the result of genericClosure 75 | name = key; 76 | value = { 77 | deps = directDrvDeps path; 78 | inherit known; 79 | }; 80 | }; 81 | 82 | # Create an attribute set that maps every derivation in the combined 83 | # dependency closure of the list of input derivation paths to every of their 84 | # direct dependencies. Additionally every entry will have set their `known` 85 | # attribute to `true` if it is in the list of input derivation paths. 86 | # 87 | # Type: [str] -> set 88 | plainDrvDepMap = drvPaths: 89 | builtins.listToAttrs ( 90 | builtins.genericClosure { 91 | startSet = builtins.map (drvEntry true) drvPaths; 92 | operator = { value, ... }: builtins.map (drvEntry false) value.deps; 93 | } 94 | ); 95 | 96 | # 97 | # Calculate closest known dependencies in the dependency map 98 | # 99 | 100 | inherit (depot.nix.stateMonad) 101 | after 102 | bind 103 | for_ 104 | get 105 | getAttr 106 | run 107 | setAttr 108 | pure 109 | ; 110 | 111 | # This is an action in stateMonad which expects the (initial) state to have 112 | # been produced by `plainDrvDepMap`. Given a `drvPath`, it calculates a 113 | # `knownDeps` list which holds the `drvPath`s of the closest derivation marked 114 | # as `known` along every edge. This list is inserted into the dependency map 115 | # for `drvPath` and every other derivation in its dependecy closure (unless 116 | # the information was already present). This means that the known dependency 117 | # information for a derivation never has to be recalculated, as long as they 118 | # are part of the same stateful computation. 119 | # 120 | # The upshot is that after calling `insertKnownDeps drvPath`, 121 | # `fmap (builtins.getAttr "knownDeps") (getAttr drvPath)` will always succeed. 122 | # 123 | # Type: str -> stateMonad drvDepMap null 124 | insertKnownDeps = drvPathWithContext: 125 | let 126 | # We no longer need to read from the store, so context is irrelevant, but 127 | # we need to check for attr names which requires the absence of context. 128 | drvPath = unsafeDiscardStringContext drvPathWithContext; 129 | in 130 | bind get (initDepMap: 131 | # Get the dependency map's state before we've done anything to obtain the 132 | # entry we'll be manipulating later as well as its dependencies. 133 | let 134 | entryPoint = initDepMap.${drvPath}; 135 | 136 | # We don't need to recurse if our direct dependencies either have their 137 | # knownDeps list already populated or are known dependencies themselves. 138 | depsPrecalculated = 139 | builtins.partition 140 | (dep: 141 | initDepMap.${dep}.known 142 | || initDepMap.${dep} ? knownDeps 143 | ) 144 | entryPoint.deps; 145 | 146 | # If a direct dependency is known, it goes right to our known dependency 147 | # list. If it is unknown, we can copy its knownDeps list into our own. 148 | initiallyKnownDeps = 149 | builtins.concatLists ( 150 | builtins.map 151 | (dep: 152 | if initDepMap.${dep}.known 153 | then [ dep ] 154 | else initDepMap.${dep}.knownDeps 155 | ) 156 | depsPrecalculated.right 157 | ); 158 | in 159 | 160 | # If the information was already calculated before, we can exit right away 161 | if entryPoint ? knownDeps 162 | then pure null 163 | else 164 | after 165 | # For all unknown direct dependencies which don't have a `knownDeps` 166 | # list, we call ourselves recursively to populate it. 
Since this is 167 | # done sequentially in the state monad, we avoid recalculating the 168 | # list for the same derivation multiple times. 169 | (for_ 170 | depsPrecalculated.wrong 171 | insertKnownDeps) 172 | # After this we can obtain the updated dependency map which will have 173 | # a `knownDeps` list for all our direct dependencies and update the 174 | # entry for the input `drvPath`. 175 | (bind 176 | get 177 | (populatedDepMap: 178 | (setAttr drvPath (entryPoint // { 179 | knownDeps = 180 | lib.unique ( 181 | initiallyKnownDeps 182 | ++ builtins.concatLists ( 183 | builtins.map 184 | (dep: populatedDepMap.${dep}.knownDeps) 185 | depsPrecalculated.wrong 186 | ) 187 | ); 188 | })))) 189 | ); 190 | 191 | # This function puts it all together and is exposed via `__functor`. 192 | # 193 | # For a list of `drvPath`s, calculate an attribute set which maps every 194 | # `drvPath` to a set of the following form: 195 | # 196 | # { 197 | # known = true /* if it is in the list of input derivation paths */; 198 | # deps = [ 199 | # /* list of derivation paths it depends on directly */ 200 | # ]; 201 | # knownDeps = [ 202 | # /* list of the closest derivation paths marked as known this 203 | # derivation depends on. 204 | # */ 205 | # ]; 206 | # } 207 | knownDrvDepMap = knownDrvPaths: 208 | run 209 | (plainDrvDepMap knownDrvPaths) 210 | (after 211 | (for_ 212 | knownDrvPaths 213 | insertKnownDeps) 214 | get); 215 | 216 | # 217 | # Other things based on knownDrvDepMap 218 | # 219 | 220 | # Create a SVG visualizing `knownDrvDepMap`. Nodes are identified by derivation 221 | # name, so multiple entries can be collapsed if they have the same name. 222 | # 223 | # Type: [drv] -> drv 224 | knownDependencyGraph = name: drvs: 225 | let 226 | justName = drvPath: 227 | builtins.substring 228 | (builtins.stringLength builtins.storeDir + 1 + 32 + 1) 229 | (builtins.stringLength drvPath) 230 | (unsafeDiscardStringContext drvPath); 231 | 232 | gv = pkgs.writeText "${name}-dependency-analysis.gv" '' 233 | digraph depot { 234 | ${ 235 | (lib.concatStringsSep "\n" 236 | (lib.mapAttrsToList (name: value: 237 | if !value.known then "" 238 | else lib.concatMapStringsSep "\n" 239 | (knownDep: " \"${justName name}\" -> \"${justName knownDep}\"") 240 | value.knownDeps 241 | ) 242 | (depot.nix.dependency-analyzer ( 243 | drvsToPaths drvs 244 | )))) 245 | } 246 | } 247 | ''; 248 | in 249 | 250 | pkgs.runCommand "${name}-dependency-analysis.svg" 251 | { 252 | nativeBuildInputs = [ 253 | pkgs.buildPackages.graphviz 254 | ]; 255 | } 256 | "dot -Tsvg < ${gv} > $out"; 257 | in 258 | 259 | { 260 | __functor = _: knownDrvDepMap; 261 | 262 | inherit knownDependencyGraph plainDrvDepMap drvsToPaths; 263 | } 264 | -------------------------------------------------------------------------------- /dependency-analyzer/examples/ci-targets.nix: -------------------------------------------------------------------------------- 1 | { depot, lib, ... }: 2 | 3 | ( 4 | depot.nix.dependency-analyzer.knownDependencyGraph 5 | "depot" 6 | depot.ci.targets 7 | ).overrideAttrs (old: { 8 | # Causes an infinite recursion via ci.targets otherwise 9 | meta = lib.recursiveUpdate (old.meta or { }) { 10 | ci.skip = true; 11 | }; 12 | }) 13 | -------------------------------------------------------------------------------- /dependency-analyzer/examples/lisp.nix: -------------------------------------------------------------------------------- 1 | { depot, lib, ... 
}: 2 | 3 | depot.nix.dependency-analyzer.knownDependencyGraph "3p-lisp" ( 4 | builtins.filter lib.isDerivation (builtins.attrValues depot.third_party.lisp) 5 | ) 6 | -------------------------------------------------------------------------------- /dependency-analyzer/tests/default.nix: -------------------------------------------------------------------------------- 1 | { depot, lib, ... }: 2 | 3 | let 4 | inherit (depot.nix.runTestsuite) 5 | runTestsuite 6 | assertEq 7 | it 8 | ; 9 | 10 | inherit (depot.nix.dependency-analyzer) 11 | plainDrvDepMap 12 | drvsToPaths 13 | ; 14 | 15 | knownDrvs = drvsToPaths ( 16 | builtins.filter lib.isDerivation (builtins.attrValues depot.third_party.lisp) 17 | ); 18 | exampleMap = plainDrvDepMap knownDrvs; 19 | 20 | # These will be needed to index into the attribute set which can't have context 21 | # in the attribute names. 22 | knownDrvsNoContext = builtins.map builtins.unsafeDiscardStringContext knownDrvs; 23 | in 24 | 25 | runTestsuite "dependency-analyzer" [ 26 | (it "checks plainDrvDepMap properties" [ 27 | (assertEq "all known drvs are marked known" 28 | (builtins.all (drv: exampleMap.${drv}.known) knownDrvsNoContext) 29 | true) 30 | (assertEq "no unknown drv is marked known" 31 | (builtins.all (entry: !entry.known) ( 32 | builtins.attrValues (builtins.removeAttrs exampleMap knownDrvsNoContext) 33 | )) 34 | true) 35 | ]) 36 | ] 37 | -------------------------------------------------------------------------------- /lazy-deps/default.nix: -------------------------------------------------------------------------------- 1 | # Helper function to synthesize a directory of "lazy-built" binaries 2 | # that can be added to $PATH inside of a repository. 3 | # 4 | # Using this, a Nix shell environment in some repository can contain 5 | # several slow-to-build commands without blowing up evaluation and 6 | # build time whenever the shell is loaded. 7 | # 8 | # Note that the generated script is deliberately impure to speed up 9 | # evaluation, and expects both `git` and `nix-build` to exist in the 10 | # user's $PATH. If required, this can be done in the shell 11 | # configuration invoking this function. 12 | { pkgs, lib, ... }: 13 | 14 | let 15 | inherit (builtins) attrNames attrValues mapAttrs; 16 | inherit (lib) fix concatStringsSep; 17 | 18 | # Create the case statement for a command invocations, optionally 19 | # overriding the `TARGET_TOOL` variable. 20 | invoke = name: { attr, cmd ? null }: '' 21 | ${name}) 22 | attr="${attr}" 23 | ${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"} 24 | ''; 25 | 26 | # Create command to symlink to the dispatch script for each tool. 27 | link = name: "ln -s $target $out/bin/${name}"; 28 | 29 | invocations = tools: concatStringsSep "\n" (attrValues (mapAttrs invoke tools)); 30 | in 31 | fix (self: 32 | 33 | # Attribute set of tools that should be lazily-added to the $PATH. 34 | # 35 | # The name of each attribute is used as the command name (on $PATH). 36 | # It must contain the keys 'attr' (containing the Nix attribute path 37 | # to the tool's derivation from the top-level), and may optionally 38 | # contain the key 'cmd' to override the name of the binary inside the 39 | # derivation. 40 | tools: 41 | 42 | pkgs.runCommandNoCC "lazy-dispatch" 43 | { 44 | passthru.overrideDeps = newTools: self (tools // newTools); 45 | passthru.tools = tools; 46 | 47 | text = '' 48 | #!${pkgs.runtimeShell} 49 | set -ue 50 | 51 | if ! type git>/dev/null || ! 
type nix-build>/dev/null; then 52 | echo "The 'git' and 'nix-build' commands must be available." >&2 53 | exit 127 54 | fi 55 | 56 | readonly REPO_ROOT=$(git rev-parse --show-toplevel) 57 | TARGET_TOOL=$(basename "$0") 58 | 59 | case "''${TARGET_TOOL}" in 60 | ${invocations tools} 61 | *) 62 | echo "''${TARGET_TOOL} is currently not installed in this repository." >&2 63 | exit 127 64 | ;; 65 | esac 66 | 67 | result=$(nix-build --no-out-link --attr "''${attr}" "''${REPO_ROOT}") 68 | PATH="''${result}/bin:$PATH" 69 | exec "''${TARGET_TOOL}" "''${@}" 70 | ''; 71 | 72 | # Access this to get a compatible nix-shell 73 | passthru.devShell = pkgs.mkShellNoCC { 74 | name = "${self.name}-shell"; 75 | packages = [ self ]; 76 | }; 77 | } 78 | '' 79 | # Write the dispatch code 80 | target=$out/bin/__dispatch 81 | mkdir -p "$(dirname "$target")" 82 | echo "$text" > $target 83 | chmod +x $target 84 | 85 | # Add symlinks from all the tools to the dispatch 86 | ${concatStringsSep "\n" (map link (attrNames tools))} 87 | 88 | # Check that it's working-ish 89 | ${pkgs.stdenv.shellDryRun} $target 90 | '' 91 | ) 92 | -------------------------------------------------------------------------------- /magrathea/default.nix: -------------------------------------------------------------------------------- 1 | # magrathea helps you build planets 2 | # 3 | # it is a tool for working with monorepos in the style of tvl's depot 4 | { pkgs, ... }: 5 | 6 | let 7 | inherit (pkgs) 8 | stdenv 9 | chicken 10 | chickenPackages 11 | makeWrapper 12 | git 13 | nix 14 | lib 15 | ; 16 | 17 | in 18 | stdenv.mkDerivation { 19 | name = "magrathea"; 20 | src = ./.; 21 | dontInstall = true; 22 | 23 | nativeBuildInputs = [ chicken makeWrapper ]; 24 | buildInputs = with chickenPackages.chickenEggs; [ 25 | matchable 26 | srfi-13 27 | ]; 28 | 29 | propagatedBuildInputs = [ git ]; 30 | 31 | buildPhase = '' 32 | mkdir -p $out/bin 33 | csc -o $out/bin/mg -host -static ${./mg.scm} 34 | ''; 35 | 36 | fixupPhase = '' 37 | wrapProgram $out/bin/mg --prefix PATH ${lib.makeBinPath [ nix ]} 38 | ''; 39 | } 40 | -------------------------------------------------------------------------------- /magrathea/mg.scm: -------------------------------------------------------------------------------- 1 | ;; magrathea helps you build planets 2 | ;; 3 | ;; it is a tiny tool designed to ease workflows in monorepos that are 4 | ;; modeled after the tvl depot. 5 | ;; 6 | ;; users familiar with workflows from other, larger monorepos may be 7 | ;; used to having a build tool that can work in any tree location. 8 | ;; magrathea enables this, but with nix-y monorepos. 9 | 10 | (import (chicken base) 11 | (chicken format) 12 | (chicken irregex) 13 | (chicken port) 14 | (chicken file) 15 | (chicken file posix) 16 | (chicken process) 17 | (chicken process-context) 18 | (chicken string) 19 | (matchable) 20 | (only (chicken io) read-string)) 21 | 22 | (define usage #< [] 24 | mg run [] [-- ] 25 | mg shell [] [] 26 | 27 | target: 28 | a target specification with meaning inside of the repository. can 29 | be absolute (starting with //) or relative to the current directory 30 | (as long as said directory is inside of the repo). if no target is 31 | specified, the current directory's physical target is built. 
32 | 33 | for example: 34 | 35 | //tools/magrathea - absolute physical target 36 | //foo/bar:baz - absolute virtual target 37 | magrathea - relative physical target 38 | :baz - relative virtual target 39 | 40 | commands: 41 | build - build a target 42 | shell - enter a shell with the target's build dependencies 43 | path - print source folder for the target 44 | repl - start a nix repl in the repository root 45 | run - build a target and execute its output 46 | 47 | file all feedback on b.tvl.fyi 48 | USAGE 49 | ) 50 | 51 | ;; parse target definitions. trailing slashes on physical targets are 52 | ;; allowed for shell autocompletion. 53 | ;; 54 | ;; component ::= any string without "/" or ":" 55 | ;; 56 | ;; physical-target ::= 57 | ;; | "/" 58 | ;; | "/" 59 | ;; 60 | ;; virtual-target ::= ":" 61 | ;; 62 | ;; relative-target ::= 63 | ;; | 64 | ;; | 65 | ;; 66 | ;; root-anchor ::= "//" 67 | ;; 68 | ;; target ::= | 69 | 70 | ;; read a path component until it looks like something else is coming 71 | (define (read-component first port) 72 | (let ((keep-reading? 73 | (lambda () (not (or (eq? #\/ (peek-char port)) 74 | (eq? #\: (peek-char port)) 75 | (eof-object? (peek-char port))))))) 76 | (let reader ((acc (list first)) 77 | (condition (keep-reading?))) 78 | (if condition (reader (cons (read-char port) acc) (keep-reading?)) 79 | (list->string (reverse acc)))))) 80 | 81 | ;; read something that started with a slash. what will it be? 82 | (define (read-slash port) 83 | (if (eq? #\/ (peek-char port)) 84 | (begin (read-char port) 85 | 'root-anchor) 86 | 'path-separator)) 87 | 88 | ;; read any target token and leave port sitting at the next one 89 | (define (read-token port) 90 | (match (read-char port) 91 | [#\/ (read-slash port)] 92 | [#\: 'virtual-separator] 93 | [other (read-component other port)])) 94 | 95 | ;; read a target into a list of target tokens 96 | (define (read-target target-str) 97 | (call-with-input-string 98 | target-str 99 | (lambda (port) 100 | (let reader ((acc '())) 101 | (if (eof-object? (peek-char port)) 102 | (reverse acc) 103 | (reader (cons (read-token port) acc))))))) 104 | 105 | (define-record target absolute components virtual) 106 | (define (empty-target) (make-target #f '() #f)) 107 | 108 | (define-record-printer (target t out) 109 | (fprintf out (conc (if (target-absolute t) "//" "") 110 | (string-intersperse (target-components t) "/") 111 | (if (target-virtual t) ":" "") 112 | (or (target-virtual t) "")))) 113 | 114 | ;; parse and validate a list of target tokens 115 | (define parse-tokens 116 | (lambda (tokens #!optional (mode 'root) (acc (empty-target))) 117 | (match (cons mode tokens) 118 | ;; absolute target 119 | [('root . ('root-anchor . rest)) 120 | (begin (target-absolute-set! acc #t) 121 | (parse-tokens rest 'root acc))] 122 | 123 | ;; relative target minus potential garbage 124 | [('root . (not ('path-separator . _))) 125 | (parse-tokens tokens 'normal acc)] 126 | 127 | ;; virtual target 128 | [('normal . ('virtual-separator . rest)) 129 | (parse-tokens rest 'virtual acc)] 130 | 131 | [('virtual . ((? string? v))) 132 | (begin 133 | (target-virtual-set! acc v) 134 | acc)] 135 | 136 | ;; chomp through all components and separators 137 | [('normal . ('path-separator . rest)) (parse-tokens rest 'normal acc)] 138 | [('normal . ((? string? component) . rest)) 139 | (begin (target-components-set! 
140 | acc (append (target-components acc) (list component))) 141 | (parse-tokens rest 'normal acc ))] 142 | 143 | ;; nothing more to parse and not in a weird state, all done, yay! 144 | [('normal . ()) acc] 145 | 146 | ;; oh no, we ran out of input too early :( 147 | [(_ . ()) `(error . ,(format "unexpected end of input while parsing ~s target" mode))] 148 | 149 | ;; something else was invalid :( 150 | [_ `(error . ,(format "unexpected ~s while parsing ~s target" (car tokens) mode))]))) 151 | 152 | (define (parse-target target) 153 | (parse-tokens (read-target target))) 154 | 155 | ;; turn relative targets into absolute targets based on the current 156 | ;; directory 157 | (define (normalise-target t) 158 | (when (not (target-absolute t)) 159 | (target-components-set! t (append (relative-repo-path) 160 | (target-components t))) 161 | (target-absolute-set! t #t)) 162 | t) 163 | 164 | ;; nix doesn't care about the distinction between physical and virtual 165 | ;; targets, normalise it away 166 | (define (normalised-components t) 167 | (if (target-virtual t) 168 | (append (target-components t) (list (target-virtual t))) 169 | (target-components t))) 170 | 171 | ;; return the current repository root as a string 172 | (define mg--repository-root #f) 173 | (define (repository-root) 174 | (or mg--repository-root 175 | (begin 176 | (set! mg--repository-root 177 | (or (get-environment-variable "MG_ROOT") 178 | (call-with-input-pipe "git rev-parse --show-toplevel" 179 | (lambda (p) (read-chomping p))))) 180 | mg--repository-root))) 181 | 182 | ;; determine the current path relative to the root of the repository 183 | ;; and return it as a list of path components. 184 | (define (relative-repo-path) 185 | (string-split 186 | (substring (current-directory) (string-length (repository-root))) "/")) 187 | 188 | ;; escape a string for interpolation in nix code 189 | (define (nix-escape str) 190 | (string-translate* str '(("\"" . "\\\"") 191 | ("${" . "\\${")))) 192 | 193 | ;; create a nix expression to build the attribute at the specified 194 | ;; components 195 | ;; 196 | ;; an empty target will build the current folder instead. 197 | ;; 198 | ;; this uses builtins.getAttr explicitly to avoid problems with 199 | ;; escaping. 200 | (define (nix-expr-for target) 201 | (let nest ((parts (normalised-components (normalise-target target))) 202 | (acc (conc "(import " (repository-root) " {})"))) 203 | (match parts 204 | [() (conc "with builtins; " acc)] 205 | [_ (nest (cdr parts) 206 | (conc "(getAttr \"" 207 | (nix-escape (car parts)) 208 | "\" " acc ")"))]))) 209 | 210 | ;; exit and complain at the user if something went wrong 211 | (define (mg-error message) 212 | (format (current-error-port) "[mg] error: ~A~%" message) 213 | (exit 1)) 214 | 215 | (define (guarantee-success value) 216 | (match value 217 | [('error . 
message) (mg-error message)] 218 | [_ value])) 219 | 220 | (define-record build-args target passthru unknown) 221 | (define (execute-build args) 222 | (let ((expr (nix-expr-for (build-args-target args)))) 223 | (fprintf (current-error-port) "[mg] building target ~A~%" (build-args-target args)) 224 | (process-execute "nix-build" (append (list "-E" expr "--show-trace") 225 | (or (build-args-passthru args) '()))))) 226 | 227 | ;; split the arguments used for builds into target/unknown args/nix 228 | ;; args, where the latter occur after '--' 229 | (define (parse-build-args acc args) 230 | (match args 231 | ;; no arguments remaining, return accumulator as is 232 | [() acc] 233 | 234 | ;; next argument is '--' separator, split off passthru and 235 | ;; return 236 | [("--" . passthru) 237 | (begin 238 | (build-args-passthru-set! acc passthru) 239 | acc)] 240 | 241 | [(arg . rest) 242 | ;; set target if not already known (and if the first 243 | ;; argument does not look like an accidental unknown 244 | ;; parameter) 245 | (if (and (not (build-args-target acc)) 246 | (not (substring=? "-" arg))) 247 | (begin 248 | (build-args-target-set! acc (guarantee-success (parse-target arg))) 249 | (parse-build-args acc rest)) 250 | 251 | ;; otherwise, collect unknown arguments 252 | (begin 253 | (build-args-unknown-set! acc (append (or (build-args-unknown acc) '()) 254 | (list arg))) 255 | (parse-build-args acc rest)))])) 256 | 257 | ;; parse the passed build args, applying sanity checks and defaulting 258 | ;; the target if necessary, then execute the build 259 | (define (build args) 260 | (let ((parsed (parse-build-args (make-build-args #f #f #f) args))) 261 | ;; fail if there are unknown arguments present 262 | (when (build-args-unknown parsed) 263 | (let ((unknown (string-intersperse (build-args-unknown parsed)))) 264 | (mg-error (sprintf "unknown arguments: ~a 265 | 266 | if you meant to pass these arguments to nix, please separate them with 267 | '--' like so: 268 | 269 | mg build ~a -- ~a" 270 | unknown 271 | (or (build-args-target parsed) "") 272 | unknown)))) 273 | 274 | ;; default the target to the current folder's main target 275 | (unless (build-args-target parsed) 276 | (build-args-target-set! parsed (empty-target))) 277 | 278 | (execute-build parsed))) 279 | 280 | (define (execute-shell target #!optional command) 281 | (if command 282 | (fprintf (current-error-port) "[mg] executing ~A in shell for ~A~%" 283 | command 284 | target) 285 | (fprintf (current-error-port) "[mg] entering shell for ~A~%" target)) 286 | (let ((expr (nix-expr-for target)) 287 | (command (or command 288 | (get-environment-variable "SHELL") 289 | "bash"))) 290 | (process-execute "nix-shell" 291 | (list "-E" expr "--command" command)))) 292 | 293 | (define (shell args) 294 | (match args 295 | [() (execute-shell (empty-target))] 296 | [(target . args) (apply 297 | execute-shell 298 | (guarantee-success (parse-target target)) 299 | args)])) 300 | 301 | (define (repl args) 302 | (process-execute "nix" (append (list "repl" "--show-trace" (repository-root)) args))) 303 | 304 | (define (read-chomping pipe) 305 | (let ((s (read-string #f pipe))) 306 | (if (eq? 
s #!eof) "" (string-chomp s)))) 307 | 308 | (define (execute-run t #!optional cmd-args) 309 | (fprintf (current-error-port) "[mg] building target ~A~%" t) 310 | (let* ((expr (nix-expr-for t)) 311 | (out 312 | (receive (pipe _ pid) 313 | ;; TODO(sterni): temporary gc root 314 | (process "nix-build" (list "-E" expr "--no-out-link")) 315 | (let ((stdout (read-chomping pipe))) 316 | (receive (_ _ status) 317 | (process-wait pid) 318 | (when (not (eq? status 0)) 319 | (mg-error (format "Couldn't build target ~A" t))) 320 | stdout))))) 321 | 322 | (fprintf (current-error-port) "[mg] running target ~A~%" t) 323 | (process-execute 324 | ;; If the output is a file, we assume it's an executable à la writeExecline, 325 | ;; otherwise we look in the bin subdirectory and pick the only executable. 326 | ;; Handling multiple executables is not possible at the moment, the choice 327 | ;; could be made via a command line flag in the future. 328 | (if (regular-file? out) 329 | out 330 | (let* ((dir-path (string-append out "/bin")) 331 | (dir-contents (if (directory-exists? dir-path) 332 | (directory dir-path #f) 333 | '()))) 334 | (case (length dir-contents) 335 | ((0) (mg-error "no executables in build output") 336 | (exit 1)) 337 | ((1) (string-append dir-path "/" (car dir-contents))) 338 | (else (mg-error "more than one executable in build output") 339 | (exit 1))))) 340 | cmd-args))) 341 | 342 | (define (run args) 343 | (match args 344 | [() (execute-run (empty-target))] 345 | [("--" . rest) (execute-run (empty-target) rest)] 346 | [(target) (execute-run (guarantee-success (parse-target target)))] 347 | [(target . ("--" . rest)) (execute-run (guarantee-success (parse-target target)) rest)] 348 | ;; TODO(sterni): flag for selecting binary name 349 | [_ (mg-error "usage: mg run [] [-- ] (hint: use \"--\" to separate the `mg run []` invocation from the arguments you're passing to the built executable)")])) 350 | 351 | (define (path args) 352 | (match args 353 | [(arg) 354 | (print (apply string-append 355 | (intersperse 356 | (cons (repository-root) 357 | (target-components 358 | (normalise-target 359 | (guarantee-success (parse-target arg))))) 360 | "/")))] 361 | [() (mg-error "path command needs a target")] 362 | [other (mg-error (format "unknown arguments: ~a" other))])) 363 | 364 | (define (main args) 365 | (match args 366 | [() (print usage)] 367 | [("build" . _) (build (cdr args))] 368 | [("shell" . _) (shell (cdr args))] 369 | [("path" . _) (path (cdr args))] 370 | [("repl" . _) (repl (cdr args))] 371 | [("run" . _) (run (cdr args))] 372 | [other (begin (print "unknown command: mg " args) 373 | (print usage))])) 374 | 375 | (main (command-line-arguments)) 376 | -------------------------------------------------------------------------------- /nixpkgs/default.nix: -------------------------------------------------------------------------------- 1 | # This file imports the pinned nixpkgs sets and applies relevant 2 | # modifications, such as our overlays. 3 | # 4 | # The actual source pinning happens via niv in //third_party/sources 5 | # 6 | # Note that the attribute exposed by this (third_party.nixpkgs) is 7 | # "special" in that the fixpoint used as readTree's config parameter 8 | # in //default.nix passes this attribute as the `pkgs` argument to all 9 | # readTree derivations. 10 | 11 | { depot ? { } 12 | , externalArgs ? { } 13 | , depotOverlays ? true 14 | , localSystem ? externalArgs.localSystem or builtins.currentSystem 15 | , crossSystem ? 
externalArgs.crossSystem or localSystem 16 | # additional overlays to be applied. 17 | # Useful when calling this file in a view exported from depot. 18 | , additionalOverlays ? [ ] 19 | , ... 20 | }: 21 | 22 | let 23 | # Arguments passed to both the stable nixpkgs and the main, unstable one. 24 | # Includes everything but overlays which are only passed to unstable nixpkgs. 25 | commonNixpkgsArgs = { 26 | # allow users to inject their config into builds (e.g. to test CA derivations) 27 | config = 28 | (if externalArgs ? nixpkgsConfig then externalArgs.nixpkgsConfig else { }) 29 | // { 30 | allowUnfree = true; 31 | allowUnfreeRedistributable = true; 32 | allowBroken = true; 33 | # Forbids our meta.ci attribute 34 | # https://github.com/NixOS/nixpkgs/pull/191171#issuecomment-1260650771 35 | checkMeta = false; 36 | }; 37 | 38 | inherit localSystem crossSystem; 39 | }; 40 | 41 | # import the nixos-unstable package set, or optionally use the 42 | # source (e.g. a path) specified by the `nixpkgsBisectPath` 43 | # argument. This is intended for use-cases where the depot is 44 | # bisected against nixpkgs to find the root cause of an issue in a 45 | # channel bump. 46 | nixpkgsSrc = externalArgs.nixpkgsBisectPath or depot.third_party.sources.nixpkgs; 47 | 48 | # Stable package set is imported, but not exposed, to overlay 49 | # required packages into the unstable set. 50 | stableNixpkgs = import depot.third_party.sources.nixpkgs-stable commonNixpkgsArgs; 51 | 52 | # Overlay for packages that should come from the stable channel 53 | # instead (e.g. because something is broken in unstable). 54 | # Use `stableNixpkgs` from above. 55 | stableOverlay = _unstableSelf: unstableSuper: { 56 | # newer trunk fails somewhere within reqwest, trying to read a mystery file 57 | trunk = stableNixpkgs.trunk; 58 | }; 59 | 60 | # Overlay to expose the nixpkgs commits we are using to other Nix code. 61 | commitsOverlay = _: _: { 62 | nixpkgsCommits = { 63 | unstable = depot.third_party.sources.nixpkgs.rev; 64 | stable = depot.third_party.sources.nixpkgs-stable.rev; 65 | }; 66 | }; 67 | in 68 | import nixpkgsSrc (commonNixpkgsArgs // { 69 | overlays = [ 70 | commitsOverlay 71 | stableOverlay 72 | ] ++ (if depotOverlays then [ 73 | depot.third_party.overlays.haskell 74 | depot.third_party.overlays.tvl 75 | depot.third_party.overlays.ecl-static 76 | depot.third_party.overlays.dhall 77 | (import depot.third_party.sources.rust-overlay) 78 | ] else [ ] ++ additionalOverlays); 79 | }) 80 | -------------------------------------------------------------------------------- /readTree/README.md: -------------------------------------------------------------------------------- 1 | readTree 2 | ======== 3 | 4 | This is a Nix program that builds up an attribute set tree for a large 5 | repository based on the filesystem layout. 6 | 7 | It is in fact the tool that lays out the attribute set of this repository. 8 | 9 | As an example, consider a root (`.`) of a repository and a layout such as: 10 | 11 | ``` 12 | . 
13 | ├── third_party 14 | │   ├── default.nix 15 | │   └── rustpkgs 16 | │   ├── aho-corasick.nix 17 | │   └── serde.nix 18 | └── tools 19 | ├── cheddar 20 | │   └── default.nix 21 | └── roquefort.nix 22 | ``` 23 | 24 | When `readTree` is called on that tree, it will construct an attribute set with 25 | this shape: 26 | 27 | ```nix 28 | { 29 | tools = { 30 | cheddar = ...; 31 | roquefort = ...; 32 | }; 33 | 34 | third_party = { 35 | # the `default.nix` of this folder might have had arbitrary other 36 | # attributes here, such as this: 37 | favouriteColour = "orange"; 38 | 39 | rustpkgs = { 40 | aho-corasick = ...; 41 | serde = ...; 42 | }; 43 | }; 44 | } 45 | ``` 46 | 47 | Every imported Nix file that yields an attribute set will have a `__readTree = 48 | true;` attribute merged into it. 49 | 50 | ## Traversal logic 51 | 52 | `readTree` will follow any subdirectories of a tree and import all Nix files, 53 | with some exceptions: 54 | 55 | * If a folder contains a `default.nix` file, no *sibling* Nix files will be 56 | imported - however children are traversed as normal. 57 | * If a folder contains a `default.nix` it is loaded and, if it 58 | evaluates to a set, *merged* with the children. If it evaluates to 59 | anything other than a set, else the children are *not traversed*. 60 | * A folder can opt out from readTree completely by containing a 61 | `.skip-tree` file. The content of the file is not read. These 62 | folders will be missing completely from the readTree structure. 63 | * A folder can declare that its children are off-limit by containing a 64 | `.skip-subtree` file. Since the content of the file is not checked, it can be 65 | useful to leave a note for a human in the file. 66 | * The `default.nix` of the top-level folder on which readTree is 67 | called is **not** read to avoid infinite recursion (as, presumably, 68 | this file is where readTree itself is called). 69 | 70 | Traversal is lazy, `readTree` will only build up the tree as requested. This 71 | currently has the downside that directories with no importable files end up in 72 | the tree as empty nodes (`{}`). 73 | 74 | ## Import structure 75 | 76 | `readTree` is called with an argument set containing a few parameters: 77 | 78 | * `path`: Initial path at which to start the traversal. 79 | * `args`: Arguments to pass to all imports. 80 | * `filter`: (optional) A function to filter the argument set on each 81 | import based on the location in the tree. This can be used to, for 82 | example, implement a "visibility" system inside of a tree. 83 | * `scopedArgs`: (optional) An argument set that is passed to all 84 | imported files via `builtins.scopedImport`. This will forcefully 85 | override the given values in the import scope, use with care! 86 | 87 | The package headers in this repository follow the form `{ pkgs, ... }:` where 88 | `pkgs` is a fixed-point of the entire package tree (see the `default.nix` at the 89 | root of the depot). 90 | 91 | In theory `readTree` can pass arguments of different shapes, but I have found 92 | this to be a good solution for the most part. 93 | 94 | Note that `readTree` does not currently make functions overridable, though it is 95 | feasible that it could do that in the future. 
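A minimal sketch of wiring `readTree` up at the root of a repository (the
argument names follow this repository's conventions, but are otherwise
arbitrary; this is not the actual depot configuration):

```nix
# default.nix at the repository root (illustrative sketch)
let
  readTree = import ./readTree { };
  pkgs = import <nixpkgs> { };
in
readTree.fix (self: readTree {
  path = ./.;
  # Arguments passed to every imported file; `self` is the final tree,
  # so files can refer to each other through the `depot` argument.
  args = { depot = self; inherit pkgs; inherit (pkgs) lib; };
})
```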
96 | -------------------------------------------------------------------------------- /readTree/default.nix: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019 Vincent Ambo 2 | # Copyright (c) 2020-2021 The TVL Authors 3 | # SPDX-License-Identifier: MIT 4 | # 5 | # Provides a function to automatically read a filesystem structure 6 | # into a Nix attribute set. 7 | # 8 | # Called with an attribute set taking the following arguments: 9 | # 10 | # path: Path to a directory from which to start reading the tree. 11 | # 12 | # args: Argument set to pass to each imported file. 13 | # 14 | # filter: Function to filter `args` based on the tree location. This should 15 | # be a function of the form `args -> location -> args`, where the 16 | # location is a list of strings representing the path components of 17 | # the current readTree target. Optional. 18 | { ... }: 19 | 20 | let 21 | inherit (builtins) 22 | attrNames 23 | concatMap 24 | concatStringsSep 25 | elem 26 | elemAt 27 | filter 28 | hasAttr 29 | head 30 | isAttrs 31 | listToAttrs 32 | map 33 | match 34 | readDir 35 | substring; 36 | 37 | argsWithPath = args: parts: 38 | let meta.locatedAt = parts; 39 | in meta // (if isAttrs args then args else args meta); 40 | 41 | readDirVisible = path: 42 | let 43 | children = readDir path; 44 | # skip hidden files, except for those that contain special instructions to readTree 45 | isVisible = f: f == ".skip-subtree" || f == ".skip-tree" || (substring 0 1 f) != "."; 46 | names = filter isVisible (attrNames children); 47 | in 48 | listToAttrs (map 49 | (name: { 50 | inherit name; 51 | value = children.${name}; 52 | }) 53 | names); 54 | 55 | # Create a mark containing the location of this attribute and 56 | # a list of all child attribute names added by readTree. 57 | marker = parts: children: { 58 | __readTree = parts; 59 | __readTreeChildren = builtins.attrNames children; 60 | }; 61 | 62 | # Create a label from a target's tree location. 63 | mkLabel = target: 64 | let label = concatStringsSep "/" target.__readTree; 65 | in if target ? __subtarget 66 | then "${label}:${target.__subtarget}" 67 | else label; 68 | 69 | # Merge two attribute sets, but place attributes in `passthru` via 70 | # `overrideAttrs` for derivation targets that support it. 71 | merge = a: b: 72 | if a ? overrideAttrs 73 | then 74 | a.overrideAttrs 75 | (prev: { 76 | passthru = (prev.passthru or { }) // b; 77 | }) 78 | else a // b; 79 | 80 | # Import a file and enforce our calling convention 81 | importFile = args: scopedArgs: path: parts: filter: 82 | let 83 | importedFile = 84 | if scopedArgs != { } && builtins ? scopedImport # For tvix 85 | then builtins.scopedImport scopedArgs path 86 | else import path; 87 | pathType = builtins.typeOf importedFile; 88 | in 89 | if pathType != "lambda" 90 | then throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }" 91 | else importedFile (filter parts (argsWithPath args parts)); 92 | 93 | nixFileName = file: 94 | let res = match "(.*)\\.nix" file; 95 | in if res == null then null else head res; 96 | 97 | # Internal implementation of readTree, which handles things like the 98 | # skipping of trees and subtrees. 
99 | # 100 | # This method returns an attribute sets with either of two shapes: 101 | # 102 | # { ok = ...; } # a tree was read successfully 103 | # { skip = true; } # a tree was skipped 104 | # 105 | # The higher-level `readTree` method assembles the final attribute 106 | # set out of these results at the top-level, and the internal 107 | # `children` implementation unwraps and processes nested trees. 108 | readTreeImpl = { args, initPath, rootDir, parts, argsFilter, scopedArgs }: 109 | let 110 | dir = readDirVisible initPath; 111 | 112 | # Determine whether any part of this tree should be skipped. 113 | # 114 | # Adding a `.skip-subtree` file will still allow the import of 115 | # the current node's "default.nix" file, but stop recursion 116 | # there. 117 | # 118 | # Adding a `.skip-tree` file will completely ignore the folder 119 | # in which this file is located. 120 | skipTree = hasAttr ".skip-tree" dir; 121 | skipSubtree = skipTree || hasAttr ".skip-subtree" dir; 122 | 123 | joinChild = c: initPath + ("/" + c); 124 | 125 | self = 126 | if rootDir 127 | then { __readTree = [ ]; } 128 | else importFile args scopedArgs initPath parts argsFilter; 129 | 130 | # Import subdirectories of the current one, unless any skip 131 | # instructions exist. 132 | # 133 | # This file can optionally contain information on why the tree 134 | # should be ignored, but its content is not inspected by 135 | # readTree 136 | filterDir = f: dir."${f}" == "directory"; 137 | filteredChildren = map 138 | (c: { 139 | name = c; 140 | value = readTreeImpl { 141 | inherit argsFilter scopedArgs; 142 | args = args; 143 | initPath = (joinChild c); 144 | rootDir = false; 145 | parts = (parts ++ [ c ]); 146 | }; 147 | }) 148 | (filter filterDir (attrNames dir)); 149 | 150 | # Remove skipped children from the final set, and unwrap the 151 | # result set. 152 | children = 153 | if skipSubtree then [ ] 154 | else map ({ name, value }: { inherit name; value = value.ok; }) (filter (child: child.value ? ok) filteredChildren); 155 | 156 | # Import Nix files 157 | nixFiles = 158 | if skipSubtree then [ ] 159 | else filter (f: f != null) (map nixFileName (attrNames dir)); 160 | nixChildren = map 161 | (c: 162 | let 163 | p = joinChild (c + ".nix"); 164 | childParts = parts ++ [ c ]; 165 | imported = importFile args scopedArgs p childParts argsFilter; 166 | in 167 | { 168 | name = c; 169 | value = 170 | if isAttrs imported 171 | then merge imported (marker childParts { }) 172 | else imported; 173 | }) 174 | nixFiles; 175 | 176 | nodeValue = if dir ? "default.nix" then self else { }; 177 | 178 | allChildren = listToAttrs ( 179 | if dir ? "default.nix" 180 | then children 181 | else nixChildren ++ children 182 | ); 183 | 184 | in 185 | if skipTree 186 | then { skip = true; } 187 | else { 188 | ok = 189 | if isAttrs nodeValue 190 | then merge nodeValue (allChildren // (marker parts allChildren)) 191 | else nodeValue; 192 | }; 193 | 194 | # Top-level implementation of readTree itself. 195 | readTree = args: 196 | let 197 | tree = readTreeImpl args; 198 | in 199 | if tree ? skip 200 | then throw "Top-level folder has a .skip-tree marker and could not be read by readTree!" 201 | else tree.ok; 202 | 203 | # Helper function to fetch subtargets from a target. This is a 204 | # temporary helper to warn on the use of the `meta.targets` 205 | # attribute, which is deprecated in favour of `meta.ci.targets`. 206 | subtargets = node: 207 | let targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]); 208 | in if node ? 
meta.targets then 209 | builtins.trace '' 210 | Warning: The meta.targets attribute is deprecated. 211 | 212 | Please move the subtargets of //${mkLabel node} to the 213 | meta.ci.targets attribute. 214 |  215 | '' 216 | targets else targets; 217 | 218 | # Function which can be used to find all readTree targets within an 219 | # attribute set. 220 | # 221 | # This function will gather physical targets, that is targets which 222 | # correspond directly to a location in the repository, as well as 223 | # subtargets (specified in the meta.ci.targets attribute of a node). 224 | # 225 | # This can be used to discover targets for inclusion in CI 226 | # pipelines. 227 | # 228 | # Called with the arguments: 229 | # 230 | # eligible: Function to determine whether the given derivation 231 | # should be included in the build. 232 | gather = eligible: node: 233 | if node ? __readTree then 234 | # Include the node itself if it is eligible. 235 | (if eligible node then [ node ] else [ ]) 236 | # Include eligible children of the node 237 | ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren) 238 | # Include specified sub-targets of the node 239 | ++ filter eligible (map 240 | (k: (node."${k}" or { }) // { 241 | # Keep the same tree location, but explicitly mark this 242 | # node as a subtarget. 243 | __readTree = node.__readTree; 244 | __readTreeChildren = [ ]; 245 | __subtarget = k; 246 | }) 247 | (subtargets node)) 248 | else [ ]; 249 | 250 | # Determine whether a given value is a derivation. 251 | # Copied from nixpkgs/lib for cases where lib is not available yet. 252 | isDerivation = x: isAttrs x && x ? type && x.type == "derivation"; 253 | in 254 | { 255 | inherit gather mkLabel; 256 | 257 | __functor = _: 258 | { path 259 | , args 260 | , filter ? (_parts: x: x) 261 | , scopedArgs ? { } 262 | , rootDir ? true 263 | }: 264 | readTree { 265 | inherit args scopedArgs rootDir; 266 | argsFilter = filter; 267 | initPath = path; 268 | parts = [ ]; 269 | }; 270 | 271 | # In addition to readTree itself, some functionality is exposed that 272 | # is useful for users of readTree. 273 | 274 | # Create a readTree filter disallowing access to the specified 275 | # top-level folder in the repository, except for specific exceptions 276 | # specified by their (full) paths. 277 | # 278 | # Called with the arguments: 279 | # 280 | # folder: Name of the restricted top-level folder (e.g. 'experimental') 281 | # 282 | # exceptions: List of readTree parts (e.g. [ [ "services" "some-app" ] ]), 283 | # which should be able to access the restricted folder. 284 | # 285 | # reason: Textual explanation for the restriction (included in errors) 286 | restrictFolder = { folder, exceptions ? [ ], reason }: parts: args: 287 | if (elemAt parts 0) == folder || elem parts exceptions 288 | then args 289 | else args // { 290 | depot = args.depot // { 291 | "${folder}" = throw '' 292 | Access to targets under //${folder} is not permitted from 293 | other repository paths. Specific exceptions are configured 294 | at the top-level. 295 | 296 | ${reason} 297 | At location: ${builtins.concatStringsSep "." parts} 298 | ''; 299 | }; 300 | }; 301 | 302 | # This definition of fix is identical to .lib.fix, but is 303 | # provided here for cases where readTree is used before nixpkgs can 304 | # be imported. 305 | # 306 | # It is often required to create the args attribute set. 
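#
# For example (illustrative): `fix (self: { a = 1; b = self.a + 1; })`
# evaluates to `{ a = 1; b = 2; }`.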
307 | fix = f: let x = f x; in x; 308 | 309 | # Takes an attribute set and adds a meta.ci.targets attribute to it 310 | # which contains all direct children of the attribute set which are 311 | # derivations. 312 | # 313 | # Type: attrs -> attrs 314 | drvTargets = attrs: 315 | attrs // { 316 | # preserve .meta from original attrs 317 | meta = (attrs.meta or { }) // { 318 | # preserve .meta.ci (except .targets) from original attrs 319 | ci = (attrs.meta.ci or { }) // { 320 | targets = builtins.filter 321 | (x: isDerivation attrs."${x}") 322 | (builtins.attrNames attrs); 323 | }; 324 | }; 325 | }; 326 | } 327 | -------------------------------------------------------------------------------- /readTree/tests/.skip-subtree: -------------------------------------------------------------------------------- 1 | These tests call their own readTree, so the toplevel one shouldn’t bother 2 | -------------------------------------------------------------------------------- /readTree/tests/default.nix: -------------------------------------------------------------------------------- 1 | { depot, lib, ... }: 2 | 3 | let 4 | inherit (depot.nix.runTestsuite) 5 | runTestsuite 6 | it 7 | assertEq 8 | assertThrows 9 | ; 10 | 11 | tree-ex = depot.nix.readTree { 12 | path = ./test-example; 13 | args = { }; 14 | }; 15 | 16 | example = it "corresponds to the README example" [ 17 | (assertEq "third_party attrset" 18 | (lib.isAttrs tree-ex.third_party 19 | && (! lib.isDerivation tree-ex.third_party)) 20 | true) 21 | (assertEq "third_party attrset other attribute" 22 | tree-ex.third_party.favouriteColour 23 | "orange") 24 | (assertEq "rustpkgs attrset aho-corasick" 25 | tree-ex.third_party.rustpkgs.aho-corasick 26 | "aho-corasick") 27 | (assertEq "rustpkgs attrset serde" 28 | tree-ex.third_party.rustpkgs.serde 29 | "serde") 30 | (assertEq "tools cheddar" 31 | "cheddar" 32 | tree-ex.tools.cheddar) 33 | (assertEq "tools roquefort" 34 | tree-ex.tools.roquefort 35 | "roquefort") 36 | ]; 37 | 38 | tree-tl = depot.nix.readTree { 39 | path = ./test-tree-traversal; 40 | args = { }; 41 | }; 42 | 43 | traversal-logic = it "corresponds to the traversal logic in the README" [ 44 | (assertEq "skip-tree/a is read" 45 | tree-tl.skip-tree.a 46 | "a is read normally") 47 | (assertEq "skip-tree does not contain b" 48 | (builtins.attrNames tree-tl.skip-tree) 49 | [ "__readTree" "__readTreeChildren" "a" ]) 50 | (assertEq "skip-tree children list does not contain b" 51 | tree-tl.skip-tree.__readTreeChildren 52 | [ "a" ]) 53 | 54 | (assertEq "skip subtree default.nix is read" 55 | tree-tl.skip-subtree.but 56 | "the default.nix is still read") 57 | (assertEq "skip subtree a/default.nix is skipped" 58 | (tree-tl.skip-subtree ? a) 59 | false) 60 | (assertEq "skip subtree b/c.nix is skipped" 61 | (tree-tl.skip-subtree ? b) 62 | false) 63 | (assertEq "skip subtree a/default.nix would be read without .skip-subtree" 64 | (tree-tl.no-skip-subtree.a) 65 | "am I subtree yet?") 66 | (assertEq "skip subtree b/c.nix would be read without .skip-subtree" 67 | (tree-tl.no-skip-subtree.b.c) 68 | "cool") 69 | 70 | (assertEq "default.nix attrset is merged with siblings" 71 | tree-tl.default-nix.no 72 | "siblings should be read") 73 | (assertEq "default.nix means sibling isn’t read" 74 | (tree-tl.default-nix ?
sibling) 75 | false) 76 | (assertEq "default.nix means subdirs are still read and merged into default.nix" 77 | (tree-tl.default-nix.subdir.a) 78 | "but I’m picked up") 79 | 80 | (assertEq "default.nix can be not an attrset" 81 | tree-tl.default-nix.no-merge 82 | "I’m not merged with any children") 83 | (assertEq "default.nix is not an attrset -> children are not merged" 84 | (tree-tl.default-nix.no-merge ? subdir) 85 | false) 86 | 87 | (assertEq "default.nix can contain a derivation" 88 | (lib.isDerivation tree-tl.default-nix.can-be-drv) 89 | true) 90 | (assertEq "Even if default.nix is a derivation, children are traversed and merged" 91 | tree-tl.default-nix.can-be-drv.subdir.a 92 | "Picked up through the drv") 93 | (assertEq "default.nix drv is not changed by readTree" 94 | tree-tl.default-nix.can-be-drv 95 | (import ./test-tree-traversal/default-nix/can-be-drv/default.nix { })) 96 | ]; 97 | 98 | # these each call readTree themselves because the throws have to happen inside assertThrows 99 | wrong = it "cannot read these files and will complain" [ 100 | (assertThrows "this file is not a function" 101 | (depot.nix.readTree { 102 | path = ./test-wrong-not-a-function; 103 | args = { }; 104 | }).not-a-function) 105 | # can’t test for that, assertThrows can’t catch this error 106 | # (assertThrows "this file is a function but doesn’t have dots" 107 | # (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function) 108 | ]; 109 | 110 | read-markers = depot.nix.readTree { 111 | path = ./test-marker; 112 | args = { }; 113 | }; 114 | 115 | assertMarkerByPath = path: 116 | assertEq "${lib.concatStringsSep "." path} is marked correctly" 117 | (lib.getAttrFromPath path read-markers).__readTree 118 | path; 119 | 120 | markers = it "marks nodes correctly" [ 121 | (assertMarkerByPath [ "directory-marked" ]) 122 | (assertMarkerByPath [ "directory-marked" "nested" ]) 123 | (assertMarkerByPath [ "file-children" "one" ]) 124 | (assertMarkerByPath [ "file-children" "two" ]) 125 | (assertEq "nix file children are marked correctly" 126 | read-markers.file-children.__readTreeChildren [ "one" "two" ]) 127 | (assertEq "directory children are marked correctly" 128 | read-markers.directory-marked.__readTreeChildren [ "nested" ]) 129 | (assertEq "absence of children is marked" 130 | read-markers.directory-marked.nested.__readTreeChildren [ ]) 131 | ]; 132 | 133 | in 134 | runTestsuite "readTree" [ 135 | example 136 | traversal-logic 137 | wrong 138 | markers 139 | ] 140 | -------------------------------------------------------------------------------- /readTree/tests/test-example/third_party/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { 4 | favouriteColour = "orange"; 5 | } 6 | -------------------------------------------------------------------------------- /readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix: -------------------------------------------------------------------------------- 1 | { ... }: "aho-corasick" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-example/third_party/rustpkgs/serde.nix: -------------------------------------------------------------------------------- 1 | { ... }: "serde" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-example/tools/cheddar/default.nix: -------------------------------------------------------------------------------- 1 | { ... 
}: "cheddar" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-example/tools/roquefort.nix: -------------------------------------------------------------------------------- 1 | { ... }: "roquefort" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-marker/directory-marked/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { } 4 | -------------------------------------------------------------------------------- /readTree/tests/test-marker/directory-marked/nested/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { } 4 | -------------------------------------------------------------------------------- /readTree/tests/test-marker/file-children/one.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { } 4 | -------------------------------------------------------------------------------- /readTree/tests/test-marker/file-children/two.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { } 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | derivation { 3 | name = "im-a-drv"; 4 | system = builtins.currentSystem; 5 | builder = "/bin/sh"; 6 | args = [ "-c" ''echo "" > $out'' ]; 7 | } 8 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "Picked up through the drv" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { 4 | no = "siblings should be read"; 5 | } 6 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "I’m not merged with any children" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix: -------------------------------------------------------------------------------- 1 | "not accessible since parent default.nix is not an attrset" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/sibling.nix: -------------------------------------------------------------------------------- 1 | "I’m left alone" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/default-nix/subdir/a.nix: -------------------------------------------------------------------------------- 1 | { ... 
}: 2 | 3 | "but I’m picked up" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "am I subtree yet?" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "cool" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/no-skip-subtree/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { 4 | but = "the default.nix is still read"; 5 | } 6 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree: -------------------------------------------------------------------------------- 1 | this file makes subdirs be skipped, I hope 2 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-subtree/a/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "am I subtree yet?" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-subtree/b/c.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | "cool" 4 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-subtree/default.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | 3 | { 4 | but = "the default.nix is still read"; 5 | } 6 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-tree/a/default.nix: -------------------------------------------------------------------------------- 1 | _: "a is read normally" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-tree/b/.skip-tree: -------------------------------------------------------------------------------- 1 | b subfolder should be skipped completely 2 | -------------------------------------------------------------------------------- /readTree/tests/test-tree-traversal/skip-tree/b/default.nix: -------------------------------------------------------------------------------- 1 | throw "b is skipped completely" 2 | -------------------------------------------------------------------------------- /readTree/tests/test-wrong-no-dots/no-dots-in-function.nix: -------------------------------------------------------------------------------- 1 | {}: 2 | 3 | "This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments." 4 | -------------------------------------------------------------------------------- /readTree/tests/test-wrong-not-a-function/not-a-function.nix: -------------------------------------------------------------------------------- 1 | "This file needs to be a function, otherwise readTree doesn’t like it!" 
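# For contrast, a minimal file readTree would accept is a function over
# an open attribute set, e.g. `{ ... }: "some value"` (purely
# illustrative; the passing fixtures above use exactly this shape).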
2 | -------------------------------------------------------------------------------- /sources/default.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by Niv. 2 | _: 3 | 4 | let 5 | 6 | # 7 | # The fetchers. fetch_<type> fetches specs of type <type>. 8 | # 9 | 10 | fetch_file = pkgs: name: spec: 11 | let 12 | name' = sanitizeName name + "-src"; 13 | in 14 | if spec.builtin or true then 15 | builtins_fetchurl { inherit (spec) url sha256; name = name'; } 16 | else 17 | pkgs.fetchurl { inherit (spec) url sha256; name = name'; }; 18 | 19 | fetch_tarball = pkgs: name: spec: 20 | let 21 | name' = sanitizeName name + "-src"; 22 | in 23 | if spec.builtin or true then 24 | builtins_fetchTarball { name = name'; inherit (spec) url sha256; } 25 | else 26 | pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; 27 | 28 | fetch_git = name: spec: 29 | let 30 | ref = 31 | spec.ref or ( 32 | if spec ? branch then "refs/heads/${spec.branch}" else 33 | if spec ? tag then "refs/tags/${spec.tag}" else 34 | abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!" 35 | ); 36 | submodules = spec.submodules or false; 37 | submoduleArg = 38 | let 39 | nixSupportsSubmodules = builtins.compareVersions builtins.nixVersion "2.4" >= 0; 40 | emptyArgWithWarning = 41 | if submodules 42 | then 43 | builtins.trace 44 | ( 45 | "The niv input \"${name}\" uses submodules " 46 | + "but your nix's (${builtins.nixVersion}) builtins.fetchGit " 47 | + "does not support them" 48 | ) 49 | { } 50 | else { }; 51 | in 52 | if nixSupportsSubmodules 53 | then { inherit submodules; } 54 | else emptyArgWithWarning; 55 | in 56 | builtins.fetchGit 57 | ({ url = spec.repo; inherit (spec) rev; inherit ref; } // submoduleArg); 58 | 59 | fetch_local = spec: spec.path; 60 | 61 | fetch_builtin-tarball = name: throw 62 | ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. 63 | $ niv modify ${name} -a type=tarball -a builtin=true''; 64 | 65 | fetch_builtin-url = name: throw 66 | ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. 67 | $ niv modify ${name} -a type=file -a builtin=true''; 68 | 69 | # 70 | # Various helpers 71 | # 72 | 73 | # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695 74 | sanitizeName = name: 75 | ( 76 | concatMapStrings (s: if builtins.isList s then "-" else s) 77 | ( 78 | builtins.split "[^[:alnum:]+._?=-]+" 79 | ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name) 80 | ) 81 | ); 82 | 83 | # The set of packages used when specs are fetched using non-builtins. 84 | mkPkgs = sources: system: 85 | let 86 | sourcesNixpkgs = 87 | import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; }; 88 | hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; 89 | hasThisAsNixpkgsPath = <nixpkgs> == ./.; 90 | in 91 | if builtins.hasAttr "nixpkgs" sources 92 | then sourcesNixpkgs 93 | else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then 94 | import <nixpkgs> { } 95 | else 96 | abort 97 | '' 98 | Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or 99 | add a package called "nixpkgs" to your sources.json. 100 | ''; 101 | 102 | # The actual fetching function. 103 | fetch = pkgs: name: spec: 104 | 105 | if !
builtins.hasAttr "type" spec then 106 | abort "ERROR: niv spec ${name} does not have a 'type' attribute" 107 | else if spec.type == "file" then fetch_file pkgs name spec 108 | else if spec.type == "tarball" then fetch_tarball pkgs name spec 109 | else if spec.type == "git" then fetch_git name spec 110 | else if spec.type == "local" then fetch_local spec 111 | else if spec.type == "builtin-tarball" then fetch_builtin-tarball name 112 | else if spec.type == "builtin-url" then fetch_builtin-url name 113 | else 114 | abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; 115 | 116 | # If the environment variable NIV_OVERRIDE_${name} is set, then use 117 | # the path directly as opposed to the fetched source. 118 | replace = name: drv: 119 | let 120 | saneName = stringAsChars (c: if (builtins.match "[a-zA-Z0-9]" c) == null then "_" else c) name; 121 | ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}"; 122 | in 123 | if ersatz == "" then drv else 124 | # this turns the string into an actual Nix path (for both absolute and 125 | # relative paths) 126 | if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}"; 127 | 128 | # Ports of functions for older nix versions 129 | 130 | # a Nix version of mapAttrs if the built-in doesn't exist 131 | mapAttrs = builtins.mapAttrs or ( 132 | f: set: with builtins; 133 | listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) 134 | ); 135 | 136 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 137 | range = first: last: if first > last then [ ] else builtins.genList (n: first + n) (last - first + 1); 138 | 139 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 140 | stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); 141 | 142 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 143 | stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); 144 | concatMapStrings = f: list: concatStrings (map f list); 145 | concatStrings = builtins.concatStringsSep ""; 146 | 147 | # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331 148 | optionalAttrs = cond: as: if cond then as else { }; 149 | 150 | # fetchTarball version that is compatible between all the versions of Nix 151 | builtins_fetchTarball = { url, name ? null, sha256 }@attrs: 152 | let 153 | inherit (builtins) lessThan nixVersion fetchTarball; 154 | in 155 | if lessThan nixVersion "1.12" then 156 | fetchTarball ({ inherit url; } // (optionalAttrs (name != null) { inherit name; })) 157 | else 158 | fetchTarball attrs; 159 | 160 | # fetchurl version that is compatible between all the versions of Nix 161 | builtins_fetchurl = { url, name ? 
null, sha256 }@attrs: 162 | let 163 | inherit (builtins) lessThan nixVersion fetchurl; 164 | in 165 | if lessThan nixVersion "1.12" then 166 | fetchurl ({ inherit url; } // (optionalAttrs (name != null) { inherit name; })) 167 | else 168 | fetchurl attrs; 169 | 170 | # Create the final "sources" from the config 171 | mkSources = config: 172 | mapAttrs 173 | ( 174 | name: spec: 175 | if builtins.hasAttr "outPath" spec 176 | then 177 | abort 178 | "The values in sources.json should not have an 'outPath' attribute" 179 | else 180 | spec // { outPath = replace name (fetch config.pkgs name spec); } 181 | ) 182 | config.sources; 183 | 184 | # The "config" used by the fetchers 185 | mkConfig = 186 | { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null 187 | , sources ? if sourcesFile == null then { } else builtins.fromJSON (builtins.readFile sourcesFile) 188 | , system ? builtins.currentSystem 189 | , pkgs ? mkPkgs sources system 190 | }: rec { 191 | # The sources, i.e. the attribute set of spec name to spec 192 | inherit sources; 193 | 194 | # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers 195 | inherit pkgs; 196 | }; 197 | 198 | in 199 | mkSources (mkConfig { }) // { __functor = _: settings: mkSources (mkConfig settings); } 200 | -------------------------------------------------------------------------------- /sources/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "agenix": { 3 | "branch": "main", 4 | "description": "age-encrypted secrets for NixOS", 5 | "homepage": "https://matrix.to/#/#agenix:nixos.org", 6 | "owner": "ryantm", 7 | "repo": "agenix", 8 | "rev": "4835b1dc898959d8547a871ef484930675cb47f1", 9 | "sha256": "0ngkhf7qamibhbl9z1dryzscd36y4fz1m1h6fb2z6fylw0b8029p", 10 | "type": "tarball", 11 | "url": "https://github.com/ryantm/agenix/archive/4835b1dc898959d8547a871ef484930675cb47f1.tar.gz", 12 | "url_template": "https://github.com///archive/.tar.gz" 13 | }, 14 | "bqn-libs": { 15 | "branch": "master", 16 | "description": "Informal collection of BQN utilities", 17 | "homepage": "", 18 | "owner": "mlochbaum", 19 | "repo": "bqn-libs", 20 | "rev": "9acc6ce8956b316109c4bdddc970ce4316842f70", 21 | "sha256": "0gjkw3jryiq6ad1pg5ra5d40fbw7m720gfbwsa81hqp070lpv6dh", 22 | "type": "tarball", 23 | "url": "https://github.com/mlochbaum/bqn-libs/archive/9acc6ce8956b316109c4bdddc970ce4316842f70.tar.gz", 24 | "url_template": "https://github.com///archive/.tar.gz" 25 | }, 26 | "gitignore.nix": { 27 | "branch": "master", 28 | "description": "Nix functions for filtering local git sources", 29 | "gcroot": true, 30 | "homepage": "", 31 | "owner": "hercules-ci", 32 | "repo": "gitignore.nix", 33 | "rev": "637db329424fd7e46cf4185293b9cc8c88c95394", 34 | "sha256": "02wxkdpbhlm3yk5mhkhsp3kwakc16xpmsf2baw57nz1dg459qv8w", 35 | "type": "tarball", 36 | "url": "https://github.com/hercules-ci/gitignore.nix/archive/637db329424fd7e46cf4185293b9cc8c88c95394.tar.gz", 37 | "url_template": "https://github.com///archive/.tar.gz" 38 | }, 39 | "home-manager": { 40 | "branch": "master", 41 | "description": "Manage a user environment using Nix [maintainer=@rycee] ", 42 | "gcroot": true, 43 | "homepage": "https://nix-community.github.io/home-manager/", 44 | "owner": "nix-community", 45 | "repo": "home-manager", 46 | "rev": "379c9fb858ef9abe92d5590e7502a7c1387c076a", 47 | "sha256": "0gjkw9vr2rhzip3xm8n1q3wnqhxacnn2ap7n3lfbz2qqmrrfvm7d", 48 | "type": "tarball", 49 | "url": 
"https://github.com/nix-community/home-manager/archive/379c9fb858ef9abe92d5590e7502a7c1387c076a.tar.gz", 50 | "url_template": "https://github.com///archive/.tar.gz" 51 | }, 52 | "impermanence": { 53 | "branch": "master", 54 | "description": "Modules to help you handle persistent state on systems with ephemeral root storage [maintainer=@talyz]", 55 | "homepage": "", 56 | "owner": "nix-community", 57 | "repo": "impermanence", 58 | "rev": "4b3e914cdf97a5b536a889e939fb2fd2b043a170", 59 | "sha256": "04l16szln2x0ajq2x799krb53ykvc6vm44x86ppy1jg9fr82161c", 60 | "type": "tarball", 61 | "url": "https://github.com/nix-community/impermanence/archive/4b3e914cdf97a5b536a889e939fb2fd2b043a170.tar.gz", 62 | "url_template": "https://github.com///archive/.tar.gz" 63 | }, 64 | "naersk": { 65 | "branch": "master", 66 | "description": "Build rust crates in Nix. No configuration, no code generation, no IFD. Sandbox friendly. [maintainer: @Patryk27]", 67 | "homepage": "", 68 | "owner": "nmattia", 69 | "repo": "naersk", 70 | "rev": "38bc60bbc157ae266d4a0c96671c6c742ee17a5f", 71 | "sha256": "0ggi4sjrpdhr1qg9p93n0ac7bqx9b76cqq2kdzjzls3lm4qh083k", 72 | "type": "tarball", 73 | "url": "https://github.com/nmattia/naersk/archive/38bc60bbc157ae266d4a0c96671c6c742ee17a5f.tar.gz", 74 | "url_template": "https://github.com///archive/.tar.gz" 75 | }, 76 | "napalm": { 77 | "branch": "master", 78 | "description": "Support for building npm packages in Nix and lightweight npm registry [maintainer @nmattia]", 79 | "homepage": "", 80 | "owner": "nix-community", 81 | "repo": "napalm", 82 | "rev": "e1babff744cd278b56abe8478008b4a9e23036cf", 83 | "sha256": "04h62p4hxw7fhclki7hcn739hhig3rh9q4njp24j7bm0dk2kj8h6", 84 | "type": "tarball", 85 | "url": "https://github.com/nix-community/napalm/archive/e1babff744cd278b56abe8478008b4a9e23036cf.tar.gz", 86 | "url_template": "https://github.com///archive/.tar.gz" 87 | }, 88 | "nixpkgs": { 89 | "branch": "nixos-unstable", 90 | "description": "Nix Packages collection", 91 | "gcroot": true, 92 | "homepage": "", 93 | "owner": "NixOS", 94 | "repo": "nixpkgs", 95 | "rev": "96ec055edbe5ee227f28cdbc3f1ddf1df5965102", 96 | "sha256": "064q32jmj54iwgh4lcdg2aii75cr5lvb683ym5pmj2kkj340pnpd", 97 | "type": "tarball", 98 | "url": "https://github.com/NixOS/nixpkgs/archive/96ec055edbe5ee227f28cdbc3f1ddf1df5965102.tar.gz", 99 | "url_template": "https://github.com///archive/.tar.gz" 100 | }, 101 | "nixpkgs-stable": { 102 | "branch": "nixos-23.11", 103 | "description": "Nix Packages collection", 104 | "gcroot": true, 105 | "homepage": "", 106 | "owner": "NixOS", 107 | "repo": "nixpkgs", 108 | "rev": "205fd4226592cc83fd4c0885a3e4c9c400efabb5", 109 | "sha256": "1f5d2g1p6nfwycpmrnnmc2xmcszp804adp16knjvdkj8nz36y1fg", 110 | "type": "tarball", 111 | "url": "https://github.com/NixOS/nixpkgs/archive/205fd4226592cc83fd4c0885a3e4c9c400efabb5.tar.gz", 112 | "url_template": "https://github.com///archive/.tar.gz" 113 | }, 114 | "rust-overlay": { 115 | "branch": "master", 116 | "description": "Pure and reproducible nix overlay of binary distributed rust toolchains", 117 | "gcroot": true, 118 | "homepage": "", 119 | "owner": "oxalica", 120 | "repo": "rust-overlay", 121 | "rev": "405ef13a5b80a0a4d4fc87c83554423d80e5f929", 122 | "sha256": "0jff7g5f64rgs93m0zs99c387rr1qskw64f94lahj38mv4zf2jck", 123 | "type": "tarball", 124 | "url": "https://github.com/oxalica/rust-overlay/archive/405ef13a5b80a0a4d4fc87c83554423d80e5f929.tar.gz", 125 | "url_template": "https://github.com///archive/.tar.gz" 126 | }, 127 | "rustsec-advisory-db": { 128 | 
"branch": "main", 129 | "description": "Security advisory database for Rust crates published through crates.io", 130 | "homepage": "https://rustsec.org", 131 | "owner": "RustSec", 132 | "repo": "advisory-db", 133 | "rev": "7727c950e41f37f03297583c21f800546318d7f1", 134 | "sha256": "04915dx81rmwh55flld1msd5n115pmj4qjzrp2zlrxqmw7x0f1da", 135 | "type": "tarball", 136 | "url": "https://github.com/RustSec/advisory-db/archive/7727c950e41f37f03297583c21f800546318d7f1.tar.gz", 137 | "url_template": "https://github.com///archive/.tar.gz" 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /stateMonad/default.nix: -------------------------------------------------------------------------------- 1 | # Simple state monad represented as 2 | # 3 | # stateMonad s a = s -> { state : s; value : a } 4 | # 5 | { ... }: 6 | 7 | rec { 8 | # 9 | # Monad 10 | # 11 | 12 | # Type: stateMonad s a -> (a -> stateMonad s b) -> stateMonad s b 13 | bind = action: f: state: 14 | let 15 | afterAction = action state; 16 | in 17 | (f afterAction.value) afterAction.state; 18 | 19 | # Type: stateMonad s a -> stateMonad s b -> stateMonad s b 20 | after = action1: action2: state: action2 (action1 state).state; 21 | 22 | # Type: stateMonad s (stateMonad s a) -> stateMonad s a 23 | join = action: bind action (action': action'); 24 | 25 | # Type: [a] -> (a -> stateMonad s b) -> stateMonad s null 26 | for_ = xs: f: 27 | builtins.foldl' 28 | (laterAction: x: 29 | after (f x) laterAction 30 | ) 31 | (pure null) 32 | xs; 33 | 34 | # 35 | # Applicative 36 | # 37 | 38 | # Type: a -> stateMonad s a 39 | pure = value: state: { inherit state value; }; 40 | 41 | # TODO(sterni): <*>, lift2, … 42 | 43 | # 44 | # Functor 45 | # 46 | 47 | # Type: (a -> b) -> stateMonad s a -> stateMonad s b 48 | fmap = f: action: bind action (result: pure (f result)); 49 | 50 | # 51 | # State Monad 52 | # 53 | 54 | # Type: (s -> s) -> stateMonad s null 55 | modify = f: state: { value = null; state = f state; }; 56 | 57 | # Type: stateMonad s s 58 | get = state: { value = state; inherit state; }; 59 | 60 | # Type: s -> stateMonad s null 61 | set = new: modify (_: new); 62 | 63 | # Type: str -> stateMonad set set.${str} 64 | getAttr = attr: fmap (state: state.${attr}) get; 65 | 66 | # Type: str -> (any -> any) -> stateMonad s null 67 | modifyAttr = attr: f: modify (state: state // { 68 | ${attr} = f state.${attr}; 69 | }); 70 | 71 | # Type: str -> any -> stateMonad s null 72 | setAttr = attr: value: modifyAttr attr (_: value); 73 | 74 | # Type: s -> stateMonad s a -> a 75 | run = state: action: (action state).value; 76 | } 77 | -------------------------------------------------------------------------------- /stateMonad/tests/default.nix: -------------------------------------------------------------------------------- 1 | { depot, ... 
}: 2 | 3 | let 4 | inherit (depot.nix.runTestsuite) 5 | runTestsuite 6 | it 7 | assertEq 8 | ; 9 | 10 | inherit (depot.nix.stateMonad) 11 | pure 12 | run 13 | join 14 | fmap 15 | bind 16 | get 17 | set 18 | modify 19 | after 20 | for_ 21 | getAttr 22 | setAttr 23 | modifyAttr 24 | ; 25 | 26 | runStateIndependent = run (throw "This should never be evaluated!"); 27 | in 28 | 29 | runTestsuite "stateMonad" [ 30 | (it "behaves correctly independent of state" [ 31 | (assertEq "pure" (runStateIndependent (pure 21)) 21) 32 | (assertEq "join pure" (runStateIndependent (join (pure (pure 42)))) 42) 33 | (assertEq "fmap pure" (runStateIndependent (fmap (builtins.mul 2) (pure 21))) 42) 34 | (assertEq "bind pure" (runStateIndependent (bind (pure 12) (x: pure x))) 12) 35 | ]) 36 | (it "behaves correctly with an integer state" [ 37 | (assertEq "get" (run 42 get) 42) 38 | (assertEq "after set get" (run 21 (after (set 42) get)) 42) 39 | (assertEq "after modify get" (run 21 (after (modify (builtins.mul 2)) get)) 42) 40 | (assertEq "fmap get" (run 40 (fmap (builtins.add 2) get)) 42) 41 | (assertEq "stateful sum list" 42 | (run 0 (after 43 | (for_ 44 | [ 45 | 15 46 | 12 47 | 10 48 | 5 49 | ] 50 | (x: modify (builtins.add x))) 51 | get)) 52 | 42) 53 | ]) 54 | (it "behaves correctly with an attr set state" [ 55 | (assertEq "getAttr" (run { foo = 42; } (getAttr "foo")) 42) 56 | (assertEq "after setAttr getAttr" 57 | (run { foo = 21; } (after (setAttr "foo" 42) (getAttr "foo"))) 58 | 42) 59 | (assertEq "after modifyAttr getAttr" 60 | (run { foo = 10.5; } 61 | (after 62 | (modifyAttr "foo" (builtins.mul 4)) 63 | (getAttr "foo"))) 64 | 42) 65 | (assertEq "fmap getAttr" 66 | (run { foo = 21; } (fmap (builtins.mul 2) (getAttr "foo"))) 67 | 42) 68 | (assertEq "after setAttr to insert getAttr" 69 | (run { } (after (setAttr "foo" 42) (getAttr "foo"))) 70 | 42) 71 | (assertEq "insert permutations" 72 | (run 73 | { 74 | a = 2; 75 | b = 3; 76 | c = 5; 77 | } 78 | (after 79 | (bind get 80 | (state: 81 | let 82 | names = builtins.attrNames state; 83 | in 84 | for_ names (name1: 85 | for_ names (name2: 86 | # this is of course a bit silly, but making it more cumbersome 87 | # makes sure the test exercises more of the code. 88 | (bind (getAttr name1) 89 | (value1: 90 | (bind (getAttr name2) 91 | (value2: 92 | setAttr "${name1}_${name2}" (value1 * value2))))))))) 93 | get)) 94 | { 95 | a = 2; 96 | b = 3; 97 | c = 5; 98 | a_a = 4; 99 | a_b = 6; 100 | a_c = 10; 101 | b_a = 6; 102 | b_b = 9; 103 | b_c = 15; 104 | c_c = 25; 105 | c_a = 10; 106 | c_b = 15; 107 | } 108 | ) 109 | ]) 110 | ] 111 | -------------------------------------------------------------------------------- /workspace.josh: -------------------------------------------------------------------------------- 1 | ::LICENSE 2 | besadii = :/ops/besadii 3 | :/nix:[ 4 | ::buildGo/ 5 | ::buildkite/ 6 | ::dependency-analyzer/ 7 | ::lazy-deps/ 8 | ::readTree/ 9 | ::stateMonad/ 10 | ] 11 | :/third_party:[ 12 | ::nixpkgs/ 13 | ::sources/ 14 | ] 15 | magrathea = :/tools/magrathea 16 | checks = :/tools/checks 17 | --------------------------------------------------------------------------------