├── .editorconfig ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── homebrew.yml │ └── test.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── example_workspace ├── .gitkeep ├── workspace-github.toml ├── workspace-lock.toml └── workspace.toml ├── images ├── readme-example.gif └── test-lfs.txt ├── release.toml ├── scripts ├── dev.sh └── update_graphql.sh ├── shell.nix ├── src ├── commands │ ├── add_provider.rs │ ├── archive.rs │ ├── completion.rs │ ├── fetch.rs │ ├── list.rs │ ├── lock.rs │ ├── mod.rs │ ├── run.rs │ ├── switch_and_pull.rs │ └── update.rs ├── config.rs ├── lib.rs ├── lockfile.rs ├── main.rs ├── providers │ ├── gitea.rs │ ├── github.rs │ ├── gitlab.rs │ ├── graphql │ │ ├── github │ │ │ ├── .graphqlconfig │ │ │ ├── projects.graphql │ │ │ └── schema.graphql │ │ └── gitlab │ │ │ ├── .graphqlconfig │ │ │ ├── projects.graphql │ │ │ └── schema.json │ └── mod.rs ├── repository.rs └── utils.rs └── tests ├── container ├── gitea.rs └── mod.rs └── gitea_tests.rs /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | charset = utf-8 6 | trim_trailing_whitespace = true 7 | insert_final_newline = true 8 | indent_style = space 9 | indent_size = 4 10 | 11 | [*.md] 12 | trim_trailing_whitespace = false 13 | 14 | [*.yml] 15 | indent_size = 2 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | images/test-lfs.txt filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 2 | 3 | version: 2 4 | updates: 5 | - 
package-ecosystem: "cargo" 6 | directory: "/" 7 | schedule: 8 | interval: "daily" 9 | groups: 10 | dependencies: 11 | patterns: 12 | - "*" 13 | - package-ecosystem: "github-actions" 14 | directory: "/" 15 | schedule: 16 | interval: "daily" 17 | groups: 18 | dependencies: 19 | patterns: 20 | - "*" 21 | -------------------------------------------------------------------------------- /.github/workflows/homebrew.yml: -------------------------------------------------------------------------------- 1 | name: Homebrew 2 | on: 3 | push: 4 | tags: 5 | - 'v*' 6 | 7 | jobs: 8 | homebrew: 9 | name: Bump Homebrew formula 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: mislav/bump-homebrew-formula-action@v3 13 | with: 14 | formula-name: git-workspace 15 | env: 16 | COMMITTER_TOKEN: ${{ secrets.COMMITTER_TOKEN }} 17 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | 4 | push: 5 | tags: 6 | - 'v*' 7 | branches: 8 | - '**' 9 | 10 | name: CI 11 | 12 | jobs: 13 | build_and_test: 14 | name: Rust project 15 | runs-on: ${{ matrix.os }} 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [ ubuntu-latest, macos-latest, windows-latest ] 20 | include: 21 | - os: ubuntu-latest 22 | bin: git-workspace 23 | name: git-workspace-Linux-x86_64.tar.gz 24 | - os: macOS-latest 25 | bin: git-workspace 26 | name: git-workspace-Darwin-x86_64.tar.gz 27 | - os: windows-latest 28 | bin: git-workspace.exe 29 | name: git-workspace-Windows-x86_64.zip 30 | env: 31 | GITLAB_TOKEN: ${{ secrets.GITLAB_TOKEN }} 32 | GITLAB_ALTERNATIVE_TOKEN: ${{ secrets.GITLAB_TOKEN }} 33 | GITHUB_ALTERNATIVE_TOKEN: ${{ secrets.GITHUB_TOKEN }} 34 | GCM_INTERACTIVE: never 35 | GIT_TERMINAL_PROMPT: 0 36 | RUST_BACKTRACE: "1" 37 | SCCACHE_GHA_ENABLED: "true" 38 | RUSTC_WRAPPER: "sccache" 39 | release_profile: "release" 40 | steps: 41 | - name: Run 
sccache-cache 42 | uses: mozilla-actions/sccache-action@v0.0.7 43 | 44 | - name: Set workspace var 45 | shell: bash 46 | run: | 47 | mkdir -p "$RUNNER_TEMP/workspace-test-dir/" 48 | echo GIT_WORKSPACE=$RUNNER_TEMP/workspace-test-dir/ >> $GITHUB_ENV 49 | 50 | - uses: actions/checkout@master 51 | - name: Switch SSH to https 52 | shell: bash 53 | run: | 54 | git config --global url."https://gitlab.com/".insteadOf git@gitlab.com: 55 | git config --global url."https://github.com/".insteadOf git@github.com: 56 | git config --global url."https://".insteadOf git:// 57 | if [[ "${{ matrix.os }}" == "windows-latest" ]] 58 | then 59 | git config --global credential.helper wincred 60 | fi 61 | 62 | - name: Run sccache-cache 63 | uses: mozilla-actions/sccache-action@v0.0.7 64 | 65 | - uses: actions-rust-lang/setup-rust-toolchain@v1 66 | with: 67 | cache-on-failure: 'false' 68 | 69 | - run: cargo build 70 | 71 | - name: git workspace add github org 72 | run: cargo run -- add github django --exclude "/django.*" --env-name GITHUB_ALTERNATIVE_TOKEN 73 | 74 | - name: git-workspace update 75 | run: cargo run -- update 76 | 77 | - name: git-workspace fetch 78 | run: cargo run -- fetch 79 | 80 | - name: git-workspace list 81 | run: cargo run -- list 82 | 83 | - name: Remove workspace 84 | shell: bash 85 | run: echo > ${GIT_WORKSPACE}/workspace.toml 86 | 87 | - name: git-workspace archive 88 | run: cargo run -- archive --force 89 | 90 | - name: Build release 91 | if: startsWith(github.ref, 'refs/tags/') || inputs.publish-tag 92 | run: cargo build --profile=${{env.release_profile}} 93 | - name: Package 94 | if: startsWith(github.ref, 'refs/tags/') || inputs.publish-tag 95 | shell: bash 96 | run: | 97 | strip target/${{env.release_profile}}/${{ matrix.bin }} 98 | cd target/${{env.release_profile}} 99 | if [[ "${{ matrix.os }}" == "windows-latest" ]] 100 | then 101 | 7z a ../../${{ matrix.name }} ${{ matrix.bin }} 102 | else 103 | tar czvf ../../${{ matrix.name }} ${{ matrix.bin }} 104 | 
fi 105 | cd - 106 | - name: Archive binaries 107 | uses: actions/upload-artifact@v4 108 | if: startsWith(github.ref, 'refs/tags/') || inputs.publish-tag 109 | with: 110 | name: build-${{ matrix.name }} 111 | path: ${{ matrix.name }} 112 | 113 | checks: 114 | name: Checks 115 | runs-on: ubuntu-latest 116 | env: 117 | RUST_BACKTRACE: "1" 118 | SCCACHE_GHA_ENABLED: "true" 119 | RUSTC_WRAPPER: "sccache" 120 | steps: 121 | - name: Checkout sources 122 | uses: actions/checkout@v4 123 | with: 124 | ref: ${{ inputs.publish-tag || null }} 125 | 126 | - name: Run sccache-cache 127 | uses: mozilla-actions/sccache-action@v0.0.7 128 | 129 | - uses: actions-rust-lang/setup-rust-toolchain@v1 130 | with: 131 | cache-on-failure: 'false' 132 | 133 | - name: Run cargo fmt 134 | if: success() || failure() 135 | run: cargo fmt --all -- --check 136 | 137 | - name: Run cargo check 138 | if: success() || failure() 139 | run: cargo check 140 | 141 | - if: success() || failure() 142 | run: cargo clippy --all-targets --all-features -- -D warnings 143 | 144 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | ### Rust template 4 | # Generated by Cargo 5 | # will have compiled files and executables 6 | /target/ 7 | 8 | .idea/ 9 | 10 | example_workspace/* 11 | !example_workspace/*.toml 12 | !example_workspace/.gitkeep 13 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "git-workspace" 3 | version = "1.9.0" 4 | readme = "README.md" 5 | repository = "https://github.com/orf/git-workspace" 6 | authors = ["Tom Forbes "] 7 | edition = "2021" 8 | description = "Manage and update personal and work git repos from multiple providers" 9 | license = "MIT" 10 | categories = ["development-tools"] 11 | 
keywords = ["cli", "git"] 12 | exclude = ["workspace/**", "images/**", ".github/**"] 13 | rust-version = "1.74.1" 14 | 15 | [dependencies] 16 | toml = "0.8.19" 17 | serde = { version = "1.0.217", features = ["derive"] } 18 | anyhow = "1.0.95" 19 | rayon = "1.10.0" 20 | graphql_client = "0.14.0" 21 | walkdir = "2.5.0" 22 | fs_extra = "1.3.0" 23 | indicatif = { version = "0.17.9", features = ["rayon"] } 24 | console = { version = "0.15.10", features = [ 25 | "ansi-parsing", 26 | ], default-features = false } # We don't care about unicode widths 27 | atomic-counter = "1.0.1" 28 | ureq = { version = "2.12.1", features = ["json", "native-certs", "brotli"] } 29 | serde_json = "1.0.137" 30 | globset = "0.4.15" 31 | git2 = { version = "0.20.0" } 32 | regex = "1.11.1" 33 | clap = { version = "4.5.27", features = ["derive", "env"] } 34 | clap_complete = "4.5.46" 35 | 36 | [dev-dependencies] 37 | rstest = "0.24.0" 38 | tempfile = "3.14.0" 39 | testcontainers = { version = "0.23.1", features = ["blocking"] } 40 | testcontainers-modules = { version = "0.11.4", features = ["gitea"] } 41 | reqwest = { version = "0.12.12", features = ["blocking", "brotli", "json"] } 42 | ssh-key = { version = "0.6.7", features = ["ed25519"] } 43 | rand = "0.8.5" 44 | base64 = "0.22.1" 45 | ctor = "0.2.9" 46 | 47 | [target."cfg(unix)".dependencies] 48 | expanduser = "1.2.2" 49 | 50 | [profile.release] 51 | lto = true 52 | codegen-units = 1 53 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Tom Forbes 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies 
of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # git-workspace :rocket: 2 | [![Crates.io](https://img.shields.io/crates/v/git-workspace.svg)](https://crates.io/crates/git-workspace) 3 | [![Actions Status](https://github.com/orf/git-workspace/workflows/CI/badge.svg)](https://github.com/orf/git-workspace/actions) 4 | 5 | ![](./images/readme-example.gif) 6 | 7 | If your company has a large number of repositories and your work involves jumping between a lot of them then `git-workspace` can save you some time by: 8 | 9 | * Easily synchronizing your projects directory with **Github**, **Gitlab.com**, **Gitlab self-hosted** or **Gitea** :wrench: 10 | * Keep projects consistently named and under the correct path :file_folder: 11 | * Automatically set upstreams for forks :zap: 12 | * Move deleted repositories to an archive directory :floppy_disk: 13 | * Allowing you to access any repository instantly :shipit: 14 | * Execute `git fetch` on all projects in parallel :godmode: 15 | 16 | This may sound useless, but the "log into your git provider, browse to the project, copy the clone URL, 
devise a suitable path to clone it" dance can be a big slowdown. The only obvious solution here is to spend more time than you'll ever spend doing this in your whole life on writing a tool in Rust to do it for you. 17 | 18 | Table of Contents 19 | ================= 20 | 21 | * [Install :cd:](#install-cd) 22 | * [Homebrew (MacOS Linux)](#homebrew-macos--linux) 23 | * [Nix (MacOS Linux)](#nix-macos--linux) 24 | * [AUR (ArchLinux)](#aur-archlinux) 25 | * [Binaries (Windows)](#binaries-windows) 26 | * [Cargo](#cargo) 27 | * [Usage :saxophone:](#usage-saxophone) 28 | * [Define your workspace](#define-your-workspace) 29 | * [Provider credentials](#provider-credentials) 30 | * [Adding providers](#adding-providers) 31 | * [Updating your workspace](#updating-your-workspace) 32 | * [Fetching all changes](#fetching-all-changes) 33 | * [Switch projects :repeat:](#switch-projects-repeat) 34 | * [Fish, with fzf](#fish-with-fzf) 35 | * [Contributing :bug:](#contributing-bug) 36 | 37 | # Install :cd: 38 | 39 | ## Homebrew (MacOS + Linux) 40 | 41 | `brew install git-workspace` 42 | 43 | ## Nix (MacOS + Linux) 44 | 45 | ### nix-shell 46 | ```bash 47 | nix-shell -p git-workspace 48 | ``` 49 | 50 | ### nix shell (Flakes) 51 | ```bash 52 | nix shell nixpkgs#git-workspace 53 | ``` 54 | 55 | ### home-manager (home.nix) 56 | 57 | ```nix 58 | { 59 | home.packages = with pkgs; [ 60 | git-workspace 61 | ]; 62 | } 63 | ``` 64 | 65 | ### NixOS (configuration.nix) 66 | 67 | ```nix 68 | { 69 | environment.systemPackages = with pkgs; [ 70 | git-workspace 71 | ]; 72 | } 73 | ``` 74 | 75 | ## AUR (ArchLinux) 76 | 77 | `paru -S git-workspace` 78 | 79 | ## Binaries (Windows) 80 | 81 | Download the latest release from [the github releases page](https://github.com/orf/git-workspace/releases). Extract it 82 | and move it to a directory on your `PATH`. 
83 | 84 | ## Cargo 85 | 86 | Don't do this, it's pretty slow: `cargo install git-workspace` 87 | 88 | # Usage :saxophone: 89 | 90 | Git is really annoying and hijacks the `--help` flag for subcommands. **So to get help use `git-workspace --help`, not `git workspace --help`**. 91 | 92 | ``` 93 | $ git-workspace --help 94 | git-workspace 1.1.0 95 | Tom Forbes 96 | Manage and update personal and work git repos from multiple providers 97 | 98 | USAGE: 99 | git-workspace --workspace 100 | 101 | FLAGS: 102 | -h, --help 103 | Prints help information 104 | 105 | -V, --version 106 | Prints version information 107 | 108 | 109 | OPTIONS: 110 | -w, --workspace 111 | [env: GIT_WORKSPACE=/Users/tom/PycharmProjects/] 112 | 113 | 114 | SUBCOMMANDS: 115 | add Add a provider to the configuration 116 | archive Archive repositories that don't exist in the workspace anymore 117 | fetch Fetch new commits for all repositories in the workspace 118 | help Prints this message or the help of the given subcommand(s) 119 | list List all repositories in the workspace 120 | lock Fetch all repositories from configured providers and write the lockfile 121 | run Run a git command in all repositories 122 | switch-and-pull Pull new commits on the primary branch for all repositories in the workspace 123 | update Update the workspace, removing and adding any repositories as needed 124 | ``` 125 | 126 | ## Define your workspace 127 | 128 | A workspace is the directory that `git-workspace` will manage for you, filling it with projects cloned from your providers. To configure this just set a `GIT_WORKSPACE` environment variable that points to an empty directory. For example: 129 | 130 | `export GIT_WORKSPACE=~/projects` 131 | 132 | ## Provider credentials 133 | 134 | Both Github and Gitlab require personal access tokens to access their GraphQL endpoints. 
Create an access token here: 135 | 136 | * Github: https://github.com/settings/tokens (Just the `repo` scope) 137 | 138 | * Gitlab: https://gitlab.com/profile/personal_access_tokens (Just the `api` scope) 139 | 140 | Export these tokens as `GITHUB_TOKEN` and `GITLAB_TOKEN` in your shell. 141 | 142 | ## Adding providers 143 | 144 | You can use `git workspace add` to quickly add entries to your `workspace.toml`: 145 | 146 | * Clone all github repositories for a user or org 147 | 148 | * `git workspace add github [USER OR ORG NAME]` 149 | 150 | * Include and exclude specific repositories: 151 | 152 | * `git workspace add github [USER OR ORG NAME] --include="a.*$" --include="b.*$" --exclude="aa.*$" --exclude="bb.*$"` 153 | 154 | * Both `--include` and `--exclude` can be specified multiple times. 155 | * By default all repositories are included. 156 | * All `include` filters are evaluated before the `exclude` filters. 157 | 158 | * Clone a namespace or user from Gitlab: 159 | 160 | * `git workspace add gitlab gitlab-ce/gitlab-services` 161 | 162 | * Clone from a self-hosted gitlab/github instance: 163 | 164 | * `git workspace add gitlab my-company-group --url=https://internal-gitlab.company.com` 165 | * `git workspace add github user-or-org-name --url=https://internal-github.company.com/api/graphql` 166 | 167 | * 168 | 169 | ### Multiple configs 170 | 171 | Git workspace will read from any `workspace*.toml` file under your `$GIT_WORKSPACE` directory. 172 | 173 | ## Updating your workspace 174 | 175 | Running `git workspace update` will: 176 | 177 | 1. Fetch all repositories from your providers 178 | 2. Clone any new repositories that are not present locally 179 | 3. Move any deleted repositories to `$GIT_WORKSPACE/.archived/` for posterity 180 | 181 | ## Fetching all changes 182 | 183 | `git workspace fetch` will run `git fetch` on all projects. 184 | 185 | # Switch projects :repeat: 186 | 187 | `git workspace list` will output the names of all your projects. 
You can integrate this with whatever tool you wish to provide a way to quickly search for and select repositories. 188 | 189 | ## Fish, with [fzf](https://github.com/junegunn/fzf) 190 | 191 | The following fish shell snippet gives you a `open-project [search-string]` command you can use to search for and open projects. It combines the `git workspace list` command with `fzf`, and opens the project path with your `$EDITOR`: 192 | 193 | ```fish 194 | # ~/.config/fish/functions/open-project.fish 195 | function open-project -d "Open a project" 196 | set filter "$argv" 197 | set chosen_project (git workspace list | fzf -q "$filter") 198 | if string length -q -- $chosen_project 199 | $EDITOR $GIT_WORKSPACE/$chosen_project 200 | pushd $GIT_WORKSPACE/$chosen_project 201 | end 202 | end 203 | ``` 204 | 205 | ## Zsh, with [fzf](https://github.com/junegunn/fzf) 206 | 207 | ``` 208 | function project { 209 | local filter="$@" 210 | local chosen_project=$(git workspace list | fzf -q "$filter") 211 | if [[ -n $chosen_project ]]; then 212 | pushd "$GIT_WORKSPACE/$chosen_project" 213 | fi 214 | } 215 | ``` 216 | 217 | ## Bash, with [fzf](https://github.com/junegunn/fzf) 218 | 219 | Contributed by a user (@kreyren:github.com): 220 | 221 | ```bash 222 | #!/bin/sh 223 | # shellcheck shell=sh # Written to comply with IEEE Std 1003.1-2017 for standard POSIX environment 224 | 225 | ###! # WorkSPace (wsp) 226 | ###! Switches to specified git-workspace project directory 227 | ###! 
- Requires git and fzf 228 | wsp() { 229 | # Check for required non-standard commands 230 | for command in ${FZF:-"fzf"} ${GIT:-"git"}; do 231 | ${COMMAND:-"command"} -v "$command" || { ${PRINTF:-"printf"} "FATAL: %s\\n" "Command '$command' is not executable"; ${EXIT:-"exit"} 127 ;} 232 | done 233 | 234 | # shellcheck disable=SC2086 # Harmless warning about missing double-quotes that are not expected to allow parsing multiple arguments 235 | wsp_path="${1:-"${GTT_WORKSPACE:-"$PWD"}/$(${GIT:-"git"} workspace list | ${FZF:-"fzf"} ${fzf_arg:-"-q"} "$@")"}" # Path to the git workspace directory 236 | 237 | # Change directory 238 | ${CD:-"cd"} "$wsp_path" || { printf "FATAL: %s\\n" "Unable to change directory to '$wsp_path'";} 239 | } 240 | ``` 241 | 242 | Consider using [shfmt](https://github.com/patrickvane/shfmt) to optimize the file size. 243 | 244 | 245 | # Contributing :bug: 246 | 247 | This is my first 'proper' Rust project. If you're experienced with Rust you might puke at the code, but any feedback to help me improve would be greatly appreciated! 248 | 249 | If you want to contribute then just go for it. `cargo install` should get you ready to go. Be warned: there are currently no tests :bomb:. I run integration tests with Github Actions, but that's about it. It's on my to-do list, I promise :tm:. 
250 | -------------------------------------------------------------------------------- /example_workspace/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orf/git-workspace/8363d4383cc1ae1d38d54083da826c58ff2e9996/example_workspace/.gitkeep -------------------------------------------------------------------------------- /example_workspace/workspace-github.toml: -------------------------------------------------------------------------------- 1 | [[provider]] 2 | provider = "github" 3 | name = "django" 4 | path = "github" 5 | env_var = "GITHUB_TOKEN" 6 | skip_forks = false 7 | exclude = [ 8 | "djangobench$", 9 | "dsf-.*" 10 | ] 11 | -------------------------------------------------------------------------------- /example_workspace/workspace-lock.toml: -------------------------------------------------------------------------------- 1 | repo = [] 2 | -------------------------------------------------------------------------------- /example_workspace/workspace.toml: -------------------------------------------------------------------------------- 1 | [[provider]] 2 | provider = "gitlab" 3 | name = "tom6" 4 | url = "https://gitlab.com" 5 | path = "gitlab" 6 | env_var = "GITLAB_TOKEN" 7 | exclude = [] 8 | -------------------------------------------------------------------------------- /images/readme-example.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orf/git-workspace/8363d4383cc1ae1d38d54083da826c58ff2e9996/images/readme-example.gif -------------------------------------------------------------------------------- /images/test-lfs.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orf/git-workspace/8363d4383cc1ae1d38d54083da826c58ff2e9996/images/test-lfs.txt -------------------------------------------------------------------------------- /release.toml: 
-------------------------------------------------------------------------------- 1 | pre-release-commit-message = "chore({{crate_name}}): Release {{version}}" 2 | tag-message = "v{{version}}" 3 | tag-name = "v{{version}}" 4 | -------------------------------------------------------------------------------- /scripts/dev.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | GIT_WORKSPACE=$(pwd)/example_workspace cargo run -- "${@}" 4 | -------------------------------------------------------------------------------- /scripts/update_graphql.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | graphql-client introspect-schema https://gitlab.com/api/graphql > src/providers/graphql/gitlab/schema.json 4 | wget https://docs.github.com/public/schema.docs.graphql -O src/providers/graphql/github/schema.graphql 5 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | # NixOS shell configuration to bootstrap the required dependencies 2 | 3 | let 4 | moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz); 5 | nixpkgs = import { overlays = [ moz_overlay ]; }; 6 | in 7 | with nixpkgs; 8 | stdenv.mkDerivation { 9 | name = "moz_overlay_shell"; 10 | buildInputs = [ 11 | nixpkgs.latest.rustChannels.stable.rust 12 | ]; 13 | } 14 | -------------------------------------------------------------------------------- /src/commands/add_provider.rs: -------------------------------------------------------------------------------- 1 | use crate::config::{Config, ProviderSource}; 2 | use anyhow::{anyhow, Context}; 3 | use console::style; 4 | use std::path::Path; 5 | 6 | /// Add a given ProviderSource to our configuration file. 
7 | pub fn add_provider_to_config( 8 | workspace: &Path, 9 | provider_source: ProviderSource, 10 | file: &Path, 11 | ) -> anyhow::Result<()> { 12 | if !provider_source.correctly_configured() { 13 | return Err(anyhow!("Provider is not correctly configured")); 14 | } 15 | let path_to_config = workspace.join(file); 16 | // Load and parse our configuration files 17 | let config = Config::new(vec![path_to_config]); 18 | let mut sources = config.read().with_context(|| "Error reading config file")?; 19 | // Ensure we don't add duplicates: 20 | if sources.iter().any(|s| s == &provider_source) { 21 | println!("Entry already exists, skipping"); 22 | } else { 23 | println!( 24 | "Adding {} to {}", 25 | provider_source, 26 | style(&workspace.join(file).display()).green() 27 | ); 28 | // Push the provider into the source and write it to the configuration file 29 | sources.push(provider_source); 30 | config 31 | .write(sources, &workspace.join(file)) 32 | .with_context(|| "Error writing config file")?; 33 | } 34 | Ok(()) 35 | } 36 | -------------------------------------------------------------------------------- /src/commands/archive.rs: -------------------------------------------------------------------------------- 1 | use super::lock; 2 | use crate::lockfile::Lockfile; 3 | use crate::utils; 4 | use anyhow::Context; 5 | use console::style; 6 | use std::path::{Path, PathBuf}; 7 | 8 | use super::get_all_repositories_to_archive; 9 | 10 | pub fn archive(workspace: &Path, force: bool) -> anyhow::Result<()> { 11 | // Archive any repositories that have been deleted from the lockfile. 
12 | lock(workspace)?; 13 | 14 | let lockfile = Lockfile::new(workspace.join("workspace-lock.toml")); 15 | let repositories = lockfile.read().context("Error reading lockfile")?; 16 | let repos_to_archive = get_all_repositories_to_archive(workspace, repositories)?; 17 | 18 | if !force { 19 | for (from_path, to_path) in &repos_to_archive { 20 | let relative_from_path = from_path.strip_prefix(workspace).unwrap(); 21 | let relative_to_path = to_path.strip_prefix(workspace).unwrap(); 22 | println!( 23 | "Move {} to {}", 24 | style(relative_from_path.display()).yellow(), 25 | style(relative_to_path.display()).green() 26 | ); 27 | } 28 | println!( 29 | "Will archive {} projects", 30 | style(repos_to_archive.len()).red() 31 | ); 32 | if repos_to_archive.is_empty() || !utils::confirm("Proceed?", false, " ", true) { 33 | return Ok(()); 34 | } 35 | } 36 | if !repos_to_archive.is_empty() { 37 | archive_repositories(repos_to_archive)?; 38 | } 39 | Ok(()) 40 | } 41 | 42 | fn archive_repositories(to_archive: Vec<(PathBuf, PathBuf)>) -> anyhow::Result<()> { 43 | println!("Archiving {} repositories", to_archive.len()); 44 | for (from_dir, to_dir) in to_archive.into_iter() { 45 | let parent_dir = &to_dir.parent().with_context(|| { 46 | format!("Failed to get the parent directory of {}", to_dir.display()) 47 | })?; 48 | // Create all the directories that are needed: 49 | fs_extra::dir::create_all(parent_dir, false) 50 | .with_context(|| format!("Error creating directory {}", to_dir.display()))?; 51 | 52 | // Move the directory to the archive directory: 53 | match std::fs::rename(&from_dir, &to_dir) { 54 | Ok(_) => { 55 | println!( 56 | "Moved {} to {}", 57 | style(from_dir.display()).yellow(), 58 | style(to_dir.display()).green() 59 | ); 60 | } 61 | Err(e) => { 62 | eprintln!( 63 | "{} {e}\n Target: {}\n Dest: {}\nPlease remove existing directory before retrying", 64 | style("Error moving directory!").red(), 65 | style(from_dir.display()).yellow(), 66 | 
style(to_dir.display()).green() 67 | ); 68 | } 69 | }; 70 | } 71 | 72 | Ok(()) 73 | } 74 | -------------------------------------------------------------------------------- /src/commands/completion.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | use clap::Command; 4 | use clap_complete::{generate, Shell}; 5 | 6 | /// Generate shell completions 7 | pub fn completion(shell: Shell, app: &mut Command) -> anyhow::Result<()> { 8 | generate(shell, app, app.get_name().to_string(), &mut io::stdout()); 9 | Ok(()) 10 | } 11 | -------------------------------------------------------------------------------- /src/commands/fetch.rs: -------------------------------------------------------------------------------- 1 | use super::execute_cmd; 2 | use std::path::Path; 3 | 4 | /// Run `git fetch` on all our repositories 5 | pub fn fetch(workspace: &Path, threads: usize) -> anyhow::Result<()> { 6 | let cmd = [ 7 | "fetch", 8 | "--all", 9 | "--prune", 10 | "--recurse-submodules=on-demand", 11 | "--progress", 12 | ]; 13 | execute_cmd( 14 | workspace, 15 | threads, 16 | "git".to_string(), 17 | cmd.iter().map(|s| (*s).to_string()).collect(), 18 | )?; 19 | Ok(()) 20 | } 21 | -------------------------------------------------------------------------------- /src/commands/list.rs: -------------------------------------------------------------------------------- 1 | use crate::lockfile::Lockfile; 2 | use anyhow::Context; 3 | use std::path::Path; 4 | 5 | /// List the contents of our workspace 6 | pub fn list(workspace: &Path, full: bool) -> anyhow::Result<()> { 7 | // Read and parse the lockfile 8 | let lockfile = Lockfile::new(workspace.join("workspace-lock.toml")); 9 | let repositories = lockfile.read().context("Error reading lockfile")?; 10 | let existing_repositories = repositories.iter().filter(|r| r.exists(workspace)); 11 | for repo in existing_repositories { 12 | if full { 13 | println!("{}", 
repo.get_path(workspace).unwrap().display()); 14 | } else { 15 | println!("{}", repo.name()); 16 | } 17 | } 18 | Ok(()) 19 | } 20 | -------------------------------------------------------------------------------- /src/commands/lock.rs: -------------------------------------------------------------------------------- 1 | use crate::config::Config; 2 | use crate::lockfile::Lockfile; 3 | use crate::repository::Repository; 4 | use anyhow::Context; 5 | use indicatif::ParallelProgressIterator; 6 | use indicatif::{ProgressBar, ProgressStyle}; 7 | use rayon::prelude::*; 8 | use std::path::Path; 9 | 10 | /// Update our lockfile 11 | pub fn lock(workspace: &Path) -> anyhow::Result<()> { 12 | let config = Config::from_workspace(workspace)?; 13 | 14 | // Read the configuration sources 15 | let sources = config 16 | .read() 17 | .with_context(|| "Error reading config files")?; 18 | 19 | let total_bar = ProgressBar::new(sources.len() as u64); 20 | total_bar.set_style( 21 | ProgressStyle::default_bar() 22 | .template("[{elapsed_precise}] {percent}% [{wide_bar:.cyan/blue}] {pos}/{len} (ETA: {eta_precise})").expect("Invalid template") 23 | .progress_chars("#>-"), 24 | ); 25 | 26 | println!("Fetching repositories..."); 27 | 28 | // For each source, in sequence, fetch the repositories 29 | let results = sources 30 | .par_iter() 31 | .map(|source| { 32 | source 33 | .fetch_repositories() 34 | .with_context(|| format!("Error fetching repositories from {}", source)) 35 | }) 36 | .progress_with(total_bar) 37 | .collect::>>()?; 38 | let mut all_repositories: Vec = results.into_iter().flatten().collect(); 39 | // let all_repositories: Vec = all_repository_results.iter().collect::>>()?; 40 | // We may have duplicated repositories here. Make sure they are unique based on the full path. 
/// Take any number of repositories and apply `f` on each one.
/// This method takes care of displaying progress bars and displaying
/// any errors that may arise.
///
/// Note that per-repository failures do not abort the run: they are
/// collected, printed to stderr at the end, and the function still
/// returns `Ok(())` so the remaining repositories are always processed.
pub fn map_repositories<F>(repositories: &[Repository], threads: usize, f: F) -> anyhow::Result<()>
where
    F: Fn(&Repository, &ProgressBar) -> anyhow::Result<()> + std::marker::Sync,
{
    // Create our progress bar. We use Arc here as we need to share the MultiProgress across
    // more than 1 thread (described below)
    let progress = Arc::new(MultiProgress::new());
    // Create our total progress bar used with `.progress_iter()`.
    let total_bar = progress.add(ProgressBar::new(repositories.len() as u64));
    total_bar.set_style(
        ProgressStyle::default_bar()
            .template("[{elapsed_precise}] {percent}% [{wide_bar:.cyan/blue}] {pos}/{len} (ETA: {eta_precise})").expect("Invalid template")
            .progress_chars("#>-"),
    );

    // user_attended() means a tty is attached to the output.
    let is_attended = console::user_attended();
    let total_repositories = repositories.len();
    // Use a counter here if there is no tty, to show a stream of progress messages rather than
    // a dynamic progress bar.
    let counter = RelaxedCounter::new(1);

    // Create our thread pool. We do this rather than use `.par_iter()` on any iterable as it
    // allows us to customize the number of threads.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(threads)
        .build()
        .with_context(|| "Error creating the thread pool")?;

    // pool.install means that `.par_iter()` will use the thread pool we've built above.
    let errors: Vec<(&Repository, anyhow::Error)> = pool.install(|| {
        repositories
            .par_iter()
            // Update our progress bar with each iteration
            .map(|repo| {
                // Create a progress bar and configure some defaults
                let progress_bar = progress.add(ProgressBar::new_spinner());
                progress_bar.set_message("waiting...");
                progress_bar.enable_steady_tick(Duration::from_millis(500));
                // Increment our counter for use if the console is not a tty.
                let idx = counter.inc();
                if !is_attended {
                    println!("[{}/{}] Starting {}", idx, total_repositories, repo.name());
                }
                // Run our given function. If the result is an error then attach the
                // erroring Repository object to it.
                let result = match f(repo, &progress_bar) {
                    Ok(_) => Ok(()),
                    Err(e) => Err((repo, e)),
                };
                if !is_attended {
                    println!("[{}/{}] Finished {}", idx, total_repositories, repo.name());
                }
                // Clear the progress bar and return the result
                progress_bar.finish_and_clear();
                result
            })
            .progress_with(total_bar)
            // We only care about errors here, so filter them out.
            .filter_map(Result::err)
            // Collect the results into a Vec
            .collect()
    });

    // Print out each repository that failed to run, with its full error chain.
    if !errors.is_empty() {
        eprintln!("{} repositories failed:", errors.len());
        for (repo, error) in errors {
            eprintln!("{}:", repo.name());
            error
                .chain()
                .for_each(|cause| eprintln!("because: {}", cause));
        }
    }

    Ok(())
}
133 | let mut repository_paths: HashSet = repositories 134 | .iter() 135 | .filter(|r| r.exists(workspace)) 136 | .map(|r| r.get_path(workspace)) 137 | .filter_map(Result::ok) 138 | .collect(); 139 | 140 | // If the archive directory does not exist then we create it 141 | if !archive_directory.exists() { 142 | fs_extra::dir::create(&archive_directory, false).with_context(|| { 143 | format!( 144 | "Error creating archive directory {}", 145 | archive_directory.display() 146 | ) 147 | })?; 148 | } 149 | 150 | // Make sure we add our archive directory to the set of repository paths. This ensures that 151 | // it's not traversed below! 152 | repository_paths.insert( 153 | archive_directory 154 | .canonicalize() 155 | .with_context(|| "Error canoncalizing archive directory")?, 156 | ); 157 | 158 | let mut to_archive = Vec::new(); 159 | let mut it = WalkDir::new(workspace).into_iter(); 160 | 161 | // Waldir provides a `filter_entry` method, but I couldn't work out how to use it 162 | // correctly here. So we just roll our own loop: 163 | loop { 164 | // Find the next directory. This can throw an error, in which case we bail out. 165 | // Perhaps we shouldn't bail here? 166 | let entry = match it.next() { 167 | None => break, 168 | Some(Err(err)) => return Err(anyhow!("Error iterating through directory: {}", err)), 169 | Some(Ok(entry)) => entry, 170 | }; 171 | // If the current path is in the set of repository paths then we skip processing it entirely. 172 | if repository_paths.contains(entry.path()) { 173 | it.skip_current_dir(); 174 | continue; 175 | } 176 | // If the entry has a .git directory inside it then we add it to the `to_archive` list 177 | // and skip the current directory. 178 | if entry.path().join(".git").is_dir() { 179 | let path = entry.path(); 180 | // Find the relative path of the directory from the workspace. So if you have something 181 | // like `workspace/github/repo-name`, it will be `github/repo-name`. 
182 | let relative_dir = path.strip_prefix(workspace).with_context(|| { 183 | format!( 184 | "Failed to strip the prefix '{}' from {}", 185 | workspace.display(), 186 | path.display() 187 | ) 188 | })?; 189 | // Join the relative directory (`github/repo-name`) with the archive directory. 190 | let to_dir = archive_directory.join(relative_dir); 191 | to_archive.push((path.to_path_buf(), to_dir)); 192 | it.skip_current_dir(); 193 | continue; 194 | } 195 | } 196 | 197 | Ok(to_archive) 198 | } 199 | -------------------------------------------------------------------------------- /src/commands/run.rs: -------------------------------------------------------------------------------- 1 | use super::map_repositories; 2 | use crate::lockfile::Lockfile; 3 | use crate::repository::Repository; 4 | use std::path::Path; 5 | 6 | /// Execute a command on all our repositories 7 | pub fn execute_cmd( 8 | workspace: &Path, 9 | threads: usize, 10 | cmd: String, 11 | args: Vec, 12 | ) -> anyhow::Result<()> { 13 | // Read the lockfile 14 | let lockfile = Lockfile::new(workspace.join("workspace-lock.toml")); 15 | let repositories = lockfile.read()?; 16 | 17 | // We only care about repositories that exist 18 | let repos_to_fetch: Vec = repositories 19 | .iter() 20 | .filter(|r| r.exists(workspace)) 21 | .cloned() 22 | .collect(); 23 | 24 | println!( 25 | "Running {} {} on {} repositories", 26 | cmd, 27 | args.join(" "), 28 | repos_to_fetch.len() 29 | ); 30 | 31 | // Run fetch on them 32 | map_repositories(&repos_to_fetch, threads, |r, progress_bar| { 33 | r.execute_cmd(workspace, progress_bar, &cmd, &args) 34 | })?; 35 | Ok(()) 36 | } 37 | -------------------------------------------------------------------------------- /src/commands/switch_and_pull.rs: -------------------------------------------------------------------------------- 1 | use super::map_repositories; 2 | use crate::lockfile::Lockfile; 3 | use anyhow::Context; 4 | use std::path::Path; 5 | 6 | pub fn 
/// Switch every lockfile repository to its primary branch and pull new commits.
pub fn pull_all_repositories(workspace: &Path, threads: usize) -> anyhow::Result<()> {
    let lockfile = Lockfile::new(workspace.join("workspace-lock.toml"));
    let repositories = lockfile.read().with_context(|| "Error reading lockfile")?;

    println!(
        "Switching to the primary branch and pulling {} repositories",
        repositories.len()
    );

    map_repositories(&repositories, threads, |r, progress_bar| {
        r.switch_to_primary_branch(workspace)?;
        let pull_args = match (&r.upstream, &r.branch) {
            // NOTE(review): building raw git argument vectors here is awkward;
            // a dedicated "pull" abstraction on Repository would be cleaner.
            // When the repository has both an upstream remote and a known
            // branch, pull that branch explicitly from `upstream`.
            (Some(_), Some(branch)) => vec![
                "pull".to_string(),
                "upstream".to_string(),
                branch.to_string(),
            ],
            // Otherwise fall back to a plain `git pull`.
            _ => vec!["pull".to_string()],
        };
        r.execute_cmd(workspace, progress_bar, "git", &pull_args)?;
        Ok(())
    })?;

    Ok(())
}
It's fine for now. 21 | r.set_upstream(workspace)?; 22 | } 23 | Ok(()) 24 | })?; 25 | 26 | let repos_to_archive = get_all_repositories_to_archive(workspace, repositories)?; 27 | if !repos_to_archive.is_empty() { 28 | println!( 29 | "There are {} repositories that can be archived", 30 | repos_to_archive.len() 31 | ); 32 | println!( 33 | "Run {} to archive them", 34 | style("`git workspace archive`").yellow() 35 | ); 36 | } 37 | 38 | Ok(()) 39 | } 40 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use crate::providers::{GiteaProvider, GithubProvider, GitlabProvider, Provider}; 2 | use crate::repository::Repository; 3 | use anyhow::Context; 4 | use serde::{Deserialize, Serialize}; 5 | use std::fmt; 6 | use std::fs; 7 | use std::path::{Path, PathBuf}; 8 | 9 | #[derive(Deserialize, Serialize, Debug)] 10 | struct ConfigContents { 11 | #[serde(rename = "provider", default)] 12 | providers: Vec, 13 | } 14 | 15 | pub struct Config { 16 | files: Vec, 17 | } 18 | 19 | impl Config { 20 | pub fn new(files: Vec) -> Config { 21 | Config { files } 22 | } 23 | 24 | // Find all config files in workspace 25 | fn find_config_files(workspace: &Path) -> anyhow::Result> { 26 | let matcher = globset::GlobBuilder::new("workspace*.toml") 27 | .literal_separator(true) 28 | .build()? 
impl Config {
    /// Build a `Config` from an explicit list of configuration file paths.
    pub fn new(files: Vec<PathBuf>) -> Config {
        Config { files }
    }

    // Find all config files in workspace.
    // Matches `workspace*.toml` in the top level of the workspace directory
    // (literal_separator keeps the glob from crossing path separators),
    // excluding the lockfile `workspace-lock.toml`. The result is sorted so
    // providers are always read in a deterministic order.
    fn find_config_files(workspace: &Path) -> anyhow::Result<Vec<PathBuf>> {
        let matcher = globset::GlobBuilder::new("workspace*.toml")
            .literal_separator(true)
            .build()?
            .compile_matcher();
        let entries = fs::read_dir(workspace)
            .with_context(|| format!("Cannot list directory {}", workspace.display()))?;
        let mut config_files: Vec<PathBuf> = entries
            .filter_map(Result::ok)
            .map(|e| e.path())
            .filter(|p| {
                p.file_name()
                    .map(|n| n != "workspace-lock.toml" && matcher.is_match(n))
                    .unwrap_or(false)
            })
            .collect();
        config_files.sort();

        Ok(config_files)
    }

    /// Locate the configuration files in `workspace`, failing if none exist.
    pub fn from_workspace(workspace: &Path) -> anyhow::Result<Config> {
        let config_files =
            Self::find_config_files(workspace).context("Error loading config files")?;
        if config_files.is_empty() {
            anyhow::bail!("No configuration files found: Are you in the right workspace?")
        }
        Ok(Self::new(config_files))
    }

    /// Parse every config file and return the combined list of providers.
    /// Files that have disappeared since discovery are silently skipped.
    pub fn read(&self) -> anyhow::Result<Vec<ProviderSource>> {
        let mut all_providers = vec![];

        for path in &self.files {
            if !path.exists() {
                continue;
            }
            let file_contents = fs::read_to_string(path)
                .with_context(|| format!("Cannot read file {}", path.display()))?;
            let contents: ConfigContents = toml::from_str(file_contents.as_str())
                .with_context(|| format!("Error parsing TOML in file {}", path.display()))?;
            all_providers.extend(contents.providers);
        }
        Ok(all_providers)
    }

    /// Serialize `providers` as TOML to `config_path`, replacing any
    /// existing contents.
    pub fn write(&self, providers: Vec<ProviderSource>, config_path: &Path) -> anyhow::Result<()> {
        let toml = toml::to_string(&ConfigContents { providers })?;
        fs::write(config_path, toml)
            .with_context(|| format!("Error writing to file {}", config_path.display()))?;
        Ok(())
    }
}
impl ProviderSource {
    /// Return the wrapped provider implementation as a trait object.
    pub fn provider(&self) -> &dyn Provider {
        match self {
            Self::Gitea(config) => config,
            Self::Gitlab(config) => config,
            Self::Github(config) => config,
        }
    }

    /// Whether the provider has everything it needs (e.g. an auth token) to run.
    pub fn correctly_configured(&self) -> bool {
        self.provider().correctly_configured()
    }

    /// Fetch the list of repositories from the underlying provider.
    pub fn fetch_repositories(&self) -> anyhow::Result<Vec<Repository>> {
        self.provider().fetch_repositories()
    }
}

impl fmt::Display for ProviderSource {
    // Delegate display formatting to the wrapped provider.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.provider())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;
    use std::io::Write;
    use tempfile::TempDir;

    // Shared fixture: a config declaring one github and one gitlab provider.
    const WORKSPACE_FILE_CONTENT: &str = r#"[[provider]]
provider = "github"
name = "github-group"
url = "https://api.github.com/graphql"
path = "github"
env_var = "GITHUB_TOKEN"
skip_forks = false
auth_http = true
include = []
exclude = []
[[provider]]
provider = "gitlab"
name = "gitlab-group"
url = "https://gitlab.com"
path = "gitlab"
env_var = "GITLAB_COM_TOKEN"
auth_http = true
include = []
exclude = []"#;

    // Helper: write `content` to `dir/filename` and return the new file path.
    fn create_test_config(dir: &Path, filename: &str, content: &str) -> PathBuf {
        let config_path = dir.join(filename);
        let mut file = File::create(&config_path).unwrap();
        file.write_all(content.as_bytes()).unwrap();
        config_path
    }

    #[test]
    fn test_find_config_files() {
        let temp_dir = TempDir::new().unwrap();
        let dir_path = temp_dir.path();

        // Create test config files
        create_test_config(dir_path, "workspace.toml", WORKSPACE_FILE_CONTENT);
        create_test_config(dir_path, "workspace-test.toml", WORKSPACE_FILE_CONTENT);
        create_test_config(dir_path, "workspace-lock.toml", "File should be ignored");
        create_test_config(dir_path, "other.toml", "File should be ignored");

        // Only the two workspace*.toml files should be returned, in sorted order.
        let config_files = Config::find_config_files(dir_path).unwrap();
        assert_eq!(config_files.len(), 2);
        assert!(config_files[0].ends_with("workspace-test.toml"));
        assert!(config_files[1].ends_with("workspace.toml"));
    }

    #[test]
    fn test_config_from_workspace() {
        let temp_dir = TempDir::new().unwrap();
        let dir_path = temp_dir.path();

        // Test with no config files
        let result = Config::from_workspace(dir_path);
        assert!(result.is_err());

        // Test with config file
        create_test_config(dir_path, "workspace.toml", WORKSPACE_FILE_CONTENT);

        let config = Config::from_workspace(dir_path).unwrap();
        assert_eq!(config.files.len(), 1);
    }

    #[test]
    fn test_config_read() {
        let temp_dir = TempDir::new().unwrap();
        let dir_path = temp_dir.path();

        create_test_config(dir_path, "workspace.toml", WORKSPACE_FILE_CONTENT);
        create_test_config(dir_path, "workspace-42.toml", WORKSPACE_FILE_CONTENT);

        let config = Config::from_workspace(dir_path).unwrap();
        let providers = config.read().unwrap();

        // Two files with two providers each yields four providers, file order preserved.
        assert_eq!(providers.len(), 4);
        match &providers[0] {
            ProviderSource::Github(config) => assert_eq!(config.name, "github-group"),
            _ => panic!("Expected Github provider"),
        }
        match &providers[1] {
            ProviderSource::Gitlab(config) => assert_eq!(config.name, "gitlab-group"),
            _ => panic!("Expected Gitlab provider"),
        }
    }

    #[test]
    fn test_config_write() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("workspace.toml");

        let providers = vec![
            ProviderSource::Github(GithubProvider::default()),
            ProviderSource::Gitlab(GitlabProvider::default()),
        ];
        let config = Config::new(vec![config_path.clone()]);
        config.write(providers, &config_path).unwrap();

        // The serialized file should mention both provider tags.
        let content = fs::read_to_string(&config_path).unwrap();
        assert!(content.contains("github"));
        assert!(content.contains("gitlab"));
    }

    #[test]
    fn test_invalid_config_content() {
        let temp_dir = TempDir::new().unwrap();
        let dir_path = temp_dir.path();

        // Create invalid config
        create_test_config(
            dir_path,
            "workspace.toml",
            r#"[[provider]]
invalid = "content""#,
        );

        // Parsing should fail with an error rather than silently succeeding.
        let config = Config::from_workspace(dir_path).unwrap();
        let result = config.read();
        assert!(result.is_err());
    }
}
/// Handle to the workspace lockfile (`workspace-lock.toml`), which records
/// the full set of repositories resolved from all configured providers.
pub struct Lockfile {
    path: PathBuf,
}

// On-disk TOML shape of the lockfile: a list of `[[repo]]` tables.
#[derive(Deserialize, Serialize, Debug)]
struct LockfileContents {
    #[serde(rename = "repo")]
    repos: Vec<Repository>,
}

impl Lockfile {
    /// Create a handle for the lockfile at `path`. The file is not touched
    /// until `read` or `write` is called.
    pub fn new(path: PathBuf) -> Lockfile {
        Lockfile { path }
    }

    /// Read and parse the lockfile, returning the recorded repositories.
    pub fn read(&self) -> anyhow::Result<Vec<Repository>> {
        let config_data = fs::read_to_string(&self.path)
            .with_context(|| format!("Cannot read file {}", self.path.display()))?;
        let config: LockfileContents = toml::from_str(config_data.as_str())
            .with_context(|| "Error deserializing".to_string())?;
        Ok(config.repos)
    }

    /// Write `repositories` to the lockfile, sorted for deterministic output.
    pub fn write(&self, repositories: &[Repository]) -> anyhow::Result<()> {
        let mut sorted_repositories = repositories.to_owned();
        sorted_repositories.sort();

        let toml = toml::to_string(&LockfileContents {
            repos: sorted_repositories,
        })?;
        fs::write(&self.path, toml)
            .with_context(|| format!("Error writing lockfile to {}", self.path.display()))?;

        Ok(())
    }
}
// This enum drives clap's subcommand parsing. The `///` doc comments below
// are emitted verbatim as CLI help text, so their wording is user-facing;
// keep it stable.
#[derive(clap::Parser)]
enum Command {
    /// Update the workspace, removing and adding any repositories as needed.
    Update {
        #[arg(short = 't', long = "threads", default_value = "8")]
        threads: usize,
    },
    /// Fetch new commits for all repositories in the workspace
    Fetch {
        #[arg(short = 't', long = "threads", default_value = "8")]
        threads: usize,
    },
    /// Fetch all repositories from configured providers and write the lockfile
    Lock {},
    /// Pull new commits on the primary branch for all repositories in the workspace
    SwitchAndPull {
        #[arg(short = 't', long = "threads", default_value = "8")]
        threads: usize,
    },
    /// List all repositories in the workspace
    ///
    /// This command will output the names of all known repositories in the workspace.
    /// Passing --full will output absolute paths.
    List {
        #[arg(long = "full")]
        full: bool,
    },
    /// Archive repositories that don't exist in the workspace anymore.
    Archive {
        /// Disable confirmation prompt
        #[arg(long = "force")]
        force: bool,
    },
    /// Run a git command in all repositories
    ///
    /// This command executes the "command" in all git workspace repositories.
    /// The program will receive the given "args", and have it's working directory
    /// set to the repository directory.
    Run {
        #[arg(short = 't', long = "threads", default_value = "8")]
        threads: usize,
        #[arg(required = true)]
        command: String,
        args: Vec<String>,
    },
    /// Add a provider to the configuration
    Add {
        #[arg(long = "file", default_value = "workspace.toml")]
        file: PathBuf,
        #[command(subcommand)]
        command: ProviderSource,
    },
    /// Generate shell completions
    Completion {
        /// The shell to generate the completion script for
        shell: clap_complete::Shell,
    },
}
80 | let args = Args::parse(); 81 | handle_main(args) 82 | } 83 | 84 | fn handle_main(args: Args) -> anyhow::Result<()> { 85 | let workspace_path = expand_workspace_path(&args.workspace)?; 86 | let workspace_path = ensure_workspace_dir_exists(&workspace_path)?; 87 | 88 | // Run our sub command. Pretty self-explanatory. 89 | match args.command { 90 | Command::List { full } => list(&workspace_path, full)?, 91 | Command::Update { threads } => { 92 | lock(&workspace_path)?; 93 | update(&workspace_path, threads)? 94 | } 95 | Command::Lock {} => { 96 | lock(&workspace_path)?; 97 | } 98 | Command::Archive { force } => archive(&workspace_path, force)?, 99 | Command::Fetch { threads } => fetch(&workspace_path, threads)?, 100 | Command::Add { file, command } => add_provider_to_config(&workspace_path, command, &file)?, 101 | Command::Run { 102 | threads, 103 | command, 104 | args, 105 | } => execute_cmd(&workspace_path, threads, command, args)?, 106 | Command::SwitchAndPull { threads } => pull_all_repositories(&workspace_path, threads)?, 107 | Command::Completion { shell } => completion(shell, &mut Args::command())?, 108 | }; 109 | Ok(()) 110 | } 111 | -------------------------------------------------------------------------------- /src/providers/gitea.rs: -------------------------------------------------------------------------------- 1 | use crate::providers::{ 2 | create_exclude_regex_set, create_include_regex_set, Provider, APP_USER_AGENT, 3 | }; 4 | use crate::repository::Repository; 5 | use anyhow::Context; 6 | use console::style; 7 | use serde::{Deserialize, Serialize}; 8 | use std::env; 9 | use std::fmt; 10 | 11 | #[derive(Deserialize, Debug)] 12 | struct GiteaRepository { 13 | full_name: String, 14 | clone_url: String, 15 | ssh_url: String, 16 | default_branch: String, 17 | archived: bool, 18 | fork: bool, 19 | } 20 | 21 | fn default_env_var() -> String { 22 | String::from("GITEA_TOKEN") 23 | } 24 | 25 | static DEFAULT_GITEA_URL: &str = "https://gitea.com"; 26 | 27 | 
// Serde default for `url`: used when the config file omits the field.
fn public_gitea_url() -> String {
    DEFAULT_GITEA_URL.to_string()
}

// Provider configuration for a Gitea user or organisation. The `///` doc
// comments below double as clap CLI help text for `git workspace add gitea`.
#[derive(Deserialize, Serialize, Debug, Eq, Ord, PartialEq, PartialOrd, clap::Parser)]
#[serde(rename_all = "lowercase")]
#[command(about = "Add a Gitea user or organization by name")]
pub struct GiteaProvider {
    /// The name of the user or organisation to add
    pub name: String,

    #[arg(long = "path", default_value = "gitea")]
    /// Clone repos to a specific path
    path: String,

    #[arg(long = "env-name", short = 'e', default_value = "GITEA_TOKEN")]
    #[serde(default = "default_env_var")]
    /// Environment variable containing the auth token
    env_var: String,

    #[arg(long = "skip-forks")]
    #[serde(default)]
    /// Don't clone forked repositories
    skip_forks: bool,

    #[arg(long = "include")]
    #[serde(default)]
    /// Only clone repositories that match these regular expressions
    include: Vec<String>,

    #[arg(long = "auth-http")]
    #[serde(default)]
    /// Use HTTP authentication instead of SSH
    auth_http: bool,

    #[arg(long = "exclude")]
    #[serde(default)]
    /// Don't clone repositories that match these regular expressions
    exclude: Vec<String>,

    #[arg(long = "url", default_value = DEFAULT_GITEA_URL)]
    #[serde(default = "public_gitea_url")]
    /// Gitea instance URL
    pub url: String,
}

impl fmt::Display for GiteaProvider {
    // Human-readable one-line summary shown when listing configured providers.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Gitea user/org {} at {} in directory {}, using the token stored in {}",
            style(&self.name.to_lowercase()).green(),
            style(&self.url).green(),
            style(&self.path).green(),
            style(&self.env_var).green(),
        )
    }
}
impl Provider for GiteaProvider {
    /// Verify that the auth token env var is set and the configured name is
    /// well-formed; print setup instructions to stdout when it is not.
    fn correctly_configured(&self) -> bool {
        let token = env::var(&self.env_var);
        if token.is_err() {
            println!(
                "{}",
                style(format!(
                    "Error: {} environment variable is not defined",
                    self.env_var
                ))
                .red()
            );
            println!("Create an access token in your Gitea Settings -> Applications");
            println!(
                "Then set a {} environment variable with the value",
                style(&self.env_var).green()
            );
            return false;
        }
        if self.name.ends_with('/') {
            println!(
                "{}",
                style("Error: Ensure that names do not end in forward slashes").red()
            );
            println!("You specified: {}", self.name);
            return false;
        }
        true
    }

    /// Fetch all repositories for the configured name via the Gitea REST API,
    /// following pagination until an empty page is returned. Archived repos
    /// (and, optionally, forks) are filtered out, as are repos not matching
    /// the include/exclude regex sets.
    fn fetch_repositories(&self) -> anyhow::Result<Vec<Repository>> {
        let gitea_token = env::var(&self.env_var)
            .with_context(|| format!("Missing {} environment variable", self.env_var))?;

        let include_regex_set = create_include_regex_set(&self.include)?;
        let exclude_regex_set = create_exclude_regex_set(&self.exclude)?;

        let agent = ureq::AgentBuilder::new()
            .https_only(true)
            .user_agent(APP_USER_AGENT)
            .build();

        // Gitea paginates results: request 50 repos per page starting at page 1.
        let mut page = 1;
        let mut repositories = Vec::new();

        loop {
            let url = format!(
                "{}/api/v1/users/{}/repos?page={}&limit=50",
                self.url, self.name, page
            );

            let response = agent
                .get(&url)
                .set("Authorization", &format!("token {}", gitea_token))
                .call()?;

            let repos: Vec<GiteaRepository> = response.into_json()?;
            // An empty page signals the end of the listing.
            if repos.is_empty() {
                break;
            }

            repositories.extend(
                repos
                    .into_iter()
                    .filter(|r| !r.archived)
                    .filter(|r| !self.skip_forks || !r.fork)
                    .filter(|r| include_regex_set.is_match(&r.full_name))
                    .filter(|r| !exclude_regex_set.is_match(&r.full_name))
                    .map(|r| {
                        Repository::new(
                            format!("{}/{}", self.path, r.full_name),
                            // Clone over HTTP or SSH depending on configuration.
                            if self.auth_http {
                                r.clone_url
                            } else {
                                r.ssh_url
                            },
                            Some(r.default_branch),
                            None,
                        )
                    }),
            );

            page += 1;
        }

        Ok(repositories)
    }
}
// Serde default for `env_var`: used when the config file omits the field.
fn default_env_var() -> String {
    String::from("GITHUB_TOKEN")
}

static DEFAULT_GITHUB_URL: &str = "https://api.github.com/graphql";

// Serde default for `url`: used when the config file omits the field.
fn public_github_url() -> String {
    DEFAULT_GITHUB_URL.to_string()
}

// Provider configuration for a Github user or organisation. The `///` doc
// comments below double as clap CLI help text for `git workspace add github`.
#[derive(Deserialize, Serialize, Default, Debug, Eq, Ord, PartialEq, PartialOrd, clap::Parser)]
#[serde(rename_all = "lowercase")]
#[command(about = "Add a Github user or organization by name")]
pub struct GithubProvider {
    /// The name of the user or organisation to add.
    pub name: String,
    #[arg(long = "path", default_value = "github")]
    /// Clone repositories to a specific base path
    path: String,
    #[arg(long = "env-name", short = 'e', default_value = "GITHUB_TOKEN")]
    #[serde(default = "default_env_var")]
    /// Environment variable containing the auth token
    env_var: String,

    #[arg(long = "skip-forks")]
    #[serde(default)]
    /// Don't clone forked repositories
    skip_forks: bool,

    #[arg(long = "include")]
    #[serde(default)]
    /// Only clone repositories that match these regular expressions. The repository name
    /// includes the user or organisation name.
    include: Vec<String>,

    #[arg(long = "auth-http")]
    #[serde(default)]
    /// Use HTTP authentication instead of SSH
    auth_http: bool,

    #[arg(long = "exclude")]
    #[serde(default)]
    /// Don't clone repositories that match these regular expressions. The repository name
    /// includes the user or organisation name.
    exclude: Vec<String>,

    #[serde(default = "public_github_url")]
    #[arg(long = "url", default_value = DEFAULT_GITHUB_URL)]
    /// Github instance URL, if using Github Enterprise this should be
    /// http(s)://HOSTNAME/api/graphql
    pub url: String,
}
impl fmt::Display for GithubProvider {
    // Human-readable one-line summary shown when listing configured providers.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Github user/org {} in directory {}, using the token stored in {}",
            style(&self.name.to_lowercase()).green(),
            style(&self.path.to_lowercase()).green(),
            style(&self.env_var).green(),
        )
    }
}

impl GithubProvider {
    /// Convert one GraphQL repository node into our `Repository` type,
    /// placing it under this provider's base `path`.
    fn parse_repo(
        &self,
        path: &str,
        repo: &repositories::RepositoriesRepositoryOwnerRepositoriesNodes,
    ) -> Repository {
        // The default branch may be absent in the API response.
        let default_branch = repo
            .default_branch_ref
            .as_ref()
            .map(|branch| branch.name.clone());
        // If the repository has a parent (it's a fork), record the parent's
        // SSH URL as the upstream remote.
        let upstream = repo.parent.as_ref().map(|parent| parent.ssh_url.clone());

        Repository::new(
            format!("{}/{}", path, repo.name_with_owner.clone()),
            // Clone over HTTP or SSH depending on configuration.
            if self.auth_http {
                repo.url.clone()
            } else {
                repo.ssh_url.clone()
            },
            default_branch,
            upstream,
        )
    }
}
style(&self.env_var).green() 143 | ); 144 | return false; 145 | } 146 | if self.name.ends_with('/') { 147 | println!( 148 | "{}", 149 | style("Error: Ensure that names do not end in forward slashes").red() 150 | ); 151 | println!("You specified: {}", self.name); 152 | return false; 153 | } 154 | true 155 | } 156 | 157 | fn fetch_repositories(&self) -> anyhow::Result> { 158 | let github_token = env::var(&self.env_var) 159 | .with_context(|| format!("Missing {} environment variable", self.env_var))?; 160 | 161 | let auth_header = match github_token.as_str() { 162 | "none" => "none".to_string(), 163 | token => { 164 | format!("Bearer {}", token) 165 | } 166 | }; 167 | 168 | let mut repositories = vec![]; 169 | 170 | let mut after = None; 171 | 172 | let include_regex_set = create_include_regex_set(&self.include)?; 173 | let exclude_regex_set = create_exclude_regex_set(&self.exclude)?; 174 | 175 | // include_forks needs to be None instead of true, as the graphql parameter has three 176 | // states: false - no forks, true - only forks, none - all repositories. 177 | let include_forks: Option = if self.skip_forks { Some(false) } else { None }; 178 | 179 | let agent = ureq::AgentBuilder::new() 180 | .https_only(true) 181 | .user_agent(APP_USER_AGENT) 182 | .build(); 183 | 184 | loop { 185 | let q = Repositories::build_query(repositories::Variables { 186 | login: self.name.to_lowercase(), 187 | include_forks, 188 | after, 189 | }); 190 | let res = agent 191 | .post(&self.url) 192 | .set("Authorization", &auth_header) 193 | .send_json(json!(&q)); 194 | 195 | let res = match res { 196 | Ok(response) => response, 197 | Err(ureq::Error::Status(status, response)) => match response.into_string() { 198 | Ok(resp) => { 199 | bail!("Got status code {status}. Body: {resp}") 200 | } 201 | Err(e) => { 202 | bail!("Got status code {status}. 
Error reading body: {e}") 203 | } 204 | }, 205 | Err(e) => return Err(e.into()), 206 | }; 207 | 208 | let body = res.into_string()?; 209 | let response_data: Response = serde_json::from_str(&body)?; 210 | 211 | if let Some(errors) = response_data.errors { 212 | let total_errors = errors.len(); 213 | let combined_errors: Vec<_> = errors.into_iter().map(|e| e.message).collect(); 214 | let combined_message = combined_errors.join("\n"); 215 | bail!( 216 | "Received {} errors. Errors:\n{}", 217 | total_errors, 218 | combined_message 219 | ); 220 | } 221 | 222 | let response_repositories = response_data 223 | .data 224 | .with_context(|| format!("Invalid response from GitHub: {}", body))? 225 | .repository_owner 226 | .with_context(|| format!("Invalid response from GitHub: {}", body))? 227 | .repositories; 228 | 229 | repositories.extend( 230 | response_repositories 231 | .nodes 232 | .unwrap() 233 | .iter() 234 | .map(|r| r.as_ref().unwrap()) 235 | .filter(|r| !r.is_archived) 236 | .filter(|r| include_regex_set.is_match(&r.name_with_owner)) 237 | .filter(|r| !exclude_regex_set.is_match(&r.name_with_owner)) 238 | .map(|repo| self.parse_repo(&self.path, repo)), 239 | ); 240 | 241 | if !response_repositories.page_info.has_next_page { 242 | break; 243 | } 244 | after = response_repositories.page_info.end_cursor; 245 | } 246 | 247 | Ok(repositories) 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /src/providers/gitlab.rs: -------------------------------------------------------------------------------- 1 | use crate::providers::{ 2 | create_exclude_regex_set, create_include_regex_set, Provider, APP_USER_AGENT, 3 | }; 4 | use crate::repository::Repository; 5 | use anyhow::{anyhow, Context}; 6 | use console::style; 7 | use graphql_client::{GraphQLQuery, Response}; 8 | use serde::{Deserialize, Serialize}; 9 | use serde_json::json; 10 | use std::env; 11 | use std::fmt; 12 | 13 | // GraphQL queries we use to fetch user and 
group repositories. 14 | // Right now, annoyingly, Gitlab has a bug around GraphQL pagination: 15 | // https://gitlab.com/gitlab-org/gitlab/issues/33419 16 | // So, we don't paginate at all in these queries. I'll fix this once 17 | // the issue is closed. 18 | 19 | #[derive(GraphQLQuery)] 20 | #[graphql( 21 | schema_path = "src/providers/graphql/gitlab/schema.json", 22 | query_path = "src/providers/graphql/gitlab/projects.graphql", 23 | response_derives = "Debug" 24 | )] 25 | pub struct Repositories; 26 | 27 | struct ProjectNode { 28 | archived: bool, 29 | full_path: String, 30 | ssh_url: String, 31 | http_url: String, 32 | root_ref: Option, 33 | } 34 | 35 | impl From for ProjectNode { 36 | fn from(item: repositories::RepositoriesGroupProjectsEdgesNode) -> Self { 37 | Self { 38 | archived: item.archived.unwrap(), 39 | root_ref: item.repository.and_then(|r| r.root_ref), 40 | ssh_url: item.ssh_url_to_repo.expect("Unknown SSH URL"), 41 | http_url: item.http_url_to_repo.expect("Unknown HTTP URL"), 42 | full_path: item.full_path, 43 | } 44 | } 45 | } 46 | 47 | impl From for ProjectNode { 48 | fn from(item: repositories::RepositoriesNamespaceProjectsEdgesNode) -> Self { 49 | Self { 50 | archived: item.archived.unwrap(), 51 | root_ref: item.repository.and_then(|r| r.root_ref), 52 | ssh_url: item.ssh_url_to_repo.expect("Unknown SSH URL"), 53 | http_url: item.http_url_to_repo.expect("Unknown HTTP URL"), 54 | full_path: item.full_path, 55 | } 56 | } 57 | } 58 | 59 | static DEFAULT_GITLAB_URL: &str = "https://gitlab.com"; 60 | 61 | fn public_gitlab_url() -> String { 62 | DEFAULT_GITLAB_URL.to_string() 63 | } 64 | 65 | // Serde default used when a workspace.toml [[provider]] entry omits env_var. 66 | fn default_env_var() -> String { String::from("GITLAB_TOKEN") // must match this provider's clap default for --env-name, not Github's token 67 | } 68 | 69 | #[derive(Deserialize, Serialize, Default, Debug, Eq, Ord, PartialEq, PartialOrd, clap::Parser)] 70 | #[serde(rename_all = "lowercase")] 71 | #[command(about = "Add a Gitlab user or group by name")] 72 | pub struct GitlabProvider { 73 | /// The name of the gitlab group or 
namespace to add. Can include slashes. 74 | pub name: String, 75 | #[serde(default = "public_gitlab_url")] 76 | #[arg(long = "url", default_value = DEFAULT_GITLAB_URL)] 77 | /// Gitlab instance URL 78 | pub url: String, 79 | #[arg(long = "path", default_value = "gitlab")] 80 | /// Clone repos to a specific path 81 | path: String, 82 | #[arg(long = "env-name", short = 'e', default_value = "GITLAB_TOKEN")] 83 | #[serde(default = "default_env_var")] 84 | /// Environment variable containing the auth token 85 | env_var: String, 86 | 87 | #[arg(long = "include")] 88 | #[serde(default)] 89 | /// Only clone repositories that match these regular expressions. The repository name 90 | /// includes the user or organisation name. 91 | include: Vec, 92 | 93 | #[arg(long = "auth-http")] 94 | #[serde(default)] 95 | /// Use HTTP authentication instead of SSH 96 | auth_http: bool, 97 | 98 | #[arg(long = "exclude")] 99 | #[serde(default)] 100 | /// Don't clone repositories that match these regular expressions. The repository name 101 | /// includes the user or organisation name. 102 | exclude: Vec, 103 | // Currently does not work. 
104 | // https://gitlab.com/gitlab-org/gitlab/issues/121595 105 | // #[arg(long = "skip-forks")] 106 | // #[arg(about = "Don't clone forked repositories")] 107 | // #[serde(default = "default_forks")] 108 | // skip_forks: bool, 109 | } 110 | 111 | impl fmt::Display for GitlabProvider { 112 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 113 | write!( 114 | f, 115 | "Gitlab user/group {} at {} in directory {}, using the token stored in {}", 116 | style(&self.name.to_lowercase()).green(), 117 | style(&self.url).green(), 118 | style(&self.path).green(), 119 | style(&self.env_var).green(), 120 | ) 121 | } 122 | } 123 | 124 | impl Provider for GitlabProvider { 125 | fn correctly_configured(&self) -> bool { 126 | let token = env::var(&self.env_var); 127 | if token.is_err() { 128 | println!( 129 | "{}", 130 | style(format!( 131 | "Error: {} environment variable is not defined", 132 | self.env_var 133 | )) 134 | .red() 135 | ); 136 | println!("Create a personal access token here:"); 137 | println!("{}/profile/personal_access_tokens", self.url); 138 | println!( 139 | "Set an environment variable called {} with the value", 140 | self.env_var 141 | ); 142 | return false; 143 | } 144 | if self.name.ends_with('/') { 145 | println!( 146 | "{}", 147 | style("Error: Ensure that names do not end in forward slashes").red() 148 | ); 149 | println!("You specified: {}", self.name); 150 | return false; 151 | } 152 | true 153 | } 154 | fn fetch_repositories(&self) -> anyhow::Result> { 155 | let gitlab_token = env::var(&self.env_var) 156 | .with_context(|| format!("Missing {} environment variable", self.env_var))?; 157 | let mut repositories = vec![]; 158 | let mut after = Some("".to_string()); 159 | let name = self.name.to_string().to_lowercase(); 160 | 161 | let include_regex_set = create_include_regex_set(&self.include)?; 162 | let exclude_regex_set = create_exclude_regex_set(&self.exclude)?; 163 | 164 | let agent = ureq::AgentBuilder::new() 165 | .https_only(true) 166 | 
.user_agent(APP_USER_AGENT) 167 | .build(); 168 | 169 | loop { 170 | let q = Repositories::build_query(repositories::Variables { 171 | name: name.clone(), 172 | after, 173 | }); 174 | let res = agent 175 | .post(format!("{}/api/graphql", self.url).as_str()) 176 | .set("Authorization", format!("Bearer {}", gitlab_token).as_str()) 177 | .set("Content-Type", "application/json") 178 | .send_json(json!(&q))?; 179 | let json = res.into_json()?; 180 | 181 | let response_body: Response = serde_json::from_value(json)?; 182 | let data = response_body.data.expect("Missing data"); 183 | 184 | let temp_repositories: Vec; 185 | // This is annoying but I'm still not sure how to unify it. 186 | if data.group.is_some() { 187 | let group_data = data.group.expect("Missing group").projects; 188 | temp_repositories = group_data 189 | .edges 190 | .expect("missing edges") 191 | .into_iter() 192 | // Some(T) -> T 193 | .flatten() 194 | // Extract the node, which is also Some(T) 195 | .filter_map(|x| x.node) 196 | .map(ProjectNode::from) 197 | .collect(); 198 | after = group_data.page_info.end_cursor; 199 | } else if data.namespace.is_some() { 200 | let namespace_data = data.namespace.expect("Missing namespace").projects; 201 | temp_repositories = namespace_data 202 | .edges 203 | .expect("missing edges") 204 | .into_iter() 205 | // Some(T) -> T 206 | .flatten() 207 | // Extract the node, which is also Some(T) 208 | .filter_map(|x| x.node) 209 | .map(ProjectNode::from) 210 | .collect(); 211 | after = namespace_data.page_info.end_cursor; 212 | } else { 213 | return Err(anyhow!( 214 | "Gitlab group/user {} could not be found. 
Are you sure you have access?", 215 | name 216 | )); 217 | } 218 | 219 | repositories.extend( 220 | temp_repositories 221 | .into_iter() 222 | .filter(|r| !r.archived) 223 | .filter(|r| include_regex_set.is_match(&r.full_path)) 224 | .filter(|r| !exclude_regex_set.is_match(&r.full_path)) 225 | .map(|r| { 226 | Repository::new( 227 | format!("{}/{}", self.path, r.full_path), 228 | if self.auth_http { 229 | r.http_url 230 | } else { 231 | r.ssh_url 232 | }, 233 | r.root_ref, 234 | None, 235 | ) 236 | }), 237 | ); 238 | 239 | if after.is_none() { 240 | break; 241 | } 242 | } 243 | Ok(repositories) 244 | } 245 | } 246 | -------------------------------------------------------------------------------- /src/providers/graphql/github/.graphqlconfig: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Github GraphQL", 3 | "schemaPath": "schema.graphql", 4 | "extensions": { 5 | "endpoints": { 6 | "Github": { 7 | "url": "https://api.github.com/graphql", 8 | "headers": { 9 | "user-agent": "JS GraphQL", 10 | "Authorization": "Bearer ${env:GITHUB_TOKEN}" 11 | }, 12 | "introspect": true 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /src/providers/graphql/github/projects.graphql: -------------------------------------------------------------------------------- 1 | query Repositories($login: String!, $after: String, $includeForks: Boolean) { 2 | repositoryOwner(login: $login) { 3 | __typename, 4 | repositories(first: 100, after: $after, isFork: $includeForks) { 5 | pageInfo { 6 | hasNextPage 7 | endCursor 8 | } 9 | nodes { 10 | nameWithOwner, 11 | url 12 | sshUrl, 13 | isArchived, 14 | defaultBranchRef { 15 | name 16 | }, 17 | parent { 18 | url 19 | sshUrl, 20 | } 21 | } 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/providers/graphql/gitlab/.graphqlconfig: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "Gitlab GraphQL", 3 | "schemaPath": "schema.json", 4 | "extensions": { 5 | "endpoints": { 6 | "Gitlab": { 7 | "url": "https://gitlab.com/api/graphql", 8 | "headers": { 9 | "user-agent": "JS GraphQL", 10 | "Authorization": "Bearer ${env:GITLAB_TOKEN}" 11 | }, 12 | "introspect": true 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /src/providers/graphql/gitlab/projects.graphql: -------------------------------------------------------------------------------- 1 | query Repositories($name: ID!, $after: String) { 2 | namespace(fullPath: $name) { 3 | projects(includeSubgroups: true, after: $after) { 4 | edges { 5 | node { 6 | archived, 7 | fullPath, 8 | sshUrlToRepo, 9 | httpUrlToRepo, 10 | repository { 11 | rootRef 12 | } 13 | } 14 | } 15 | pageInfo { 16 | hasNextPage, 17 | endCursor, 18 | } 19 | } 20 | } 21 | group(fullPath: $name) { 22 | projects(includeSubgroups: true, after: $after) { 23 | edges { 24 | node { 25 | archived, 26 | fullPath, 27 | sshUrlToRepo, 28 | httpUrlToRepo, 29 | repository { 30 | rootRef 31 | } 32 | } 33 | }, 34 | pageInfo { 35 | hasNextPage, 36 | endCursor, 37 | } 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/providers/mod.rs: -------------------------------------------------------------------------------- 1 | mod gitea; 2 | mod github; 3 | mod gitlab; 4 | 5 | use crate::repository::Repository; 6 | use anyhow::Context; 7 | pub use gitea::GiteaProvider; 8 | pub use github::GithubProvider; 9 | pub use gitlab::GitlabProvider; 10 | use std::fmt; 11 | 12 | pub static APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),); 13 | 14 | pub trait Provider: fmt::Display { 15 | /// Returns true if the provider should work, otherwise prints an error and return false 16 | fn correctly_configured(&self) -> 
bool; 17 | fn fetch_repositories(&self) -> anyhow::Result>; 18 | } 19 | 20 | pub fn create_exclude_regex_set(items: &Vec) -> anyhow::Result { 21 | if items.is_empty() { 22 | Ok(regex::RegexSet::empty()) 23 | } else { 24 | Ok(regex::RegexSet::new(items).context("Error parsing exclude regular expressions")?) 25 | } 26 | } 27 | 28 | pub fn create_include_regex_set(items: &Vec) -> anyhow::Result { 29 | if items.is_empty() { 30 | let all = vec![".*"]; 31 | Ok(regex::RegexSet::new(all).context("Error parsing include regular expressions")?) 32 | } else { 33 | Ok(regex::RegexSet::new(items).context("Error parsing include regular expressions")?) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/repository.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Context}; 2 | use console::{strip_ansi_codes, truncate_str}; 3 | use git2::build::CheckoutBuilder; 4 | use git2::{Repository as Git2Repository, StatusOptions}; 5 | use indicatif::ProgressBar; 6 | use serde::{Deserialize, Serialize}; 7 | use std::io::{BufRead, BufReader}; 8 | use std::path::{Path, PathBuf}; 9 | use std::process::{Command, Stdio}; 10 | 11 | // Eq, Ord and friends are needed to order the list of repositories 12 | #[derive(Deserialize, Serialize, Debug, Clone, Eq, Ord, PartialEq, PartialOrd)] 13 | pub struct Repository { 14 | path: String, 15 | url: String, 16 | pub upstream: Option, 17 | pub branch: Option, 18 | } 19 | 20 | impl Repository { 21 | pub fn new( 22 | path: String, 23 | url: String, 24 | branch: Option, 25 | upstream: Option, 26 | ) -> Repository { 27 | // We have to normalize repository names here. On windows if you do `path.join(self.name())` 28 | // it will cause issues if the name contains a forward slash. So here we just normalize it 29 | // to the path separator on the system. 
30 | let norm_path = if cfg!(windows) { 31 | path.replace('/', std::path::MAIN_SEPARATOR.to_string().as_str()) 32 | } else { 33 | path 34 | }; 35 | 36 | Repository { 37 | path: norm_path, 38 | url, 39 | branch, 40 | upstream, 41 | } 42 | } 43 | 44 | pub fn set_upstream(&self, root: &Path) -> anyhow::Result<()> { 45 | let upstream = match &self.upstream { 46 | Some(upstream) => upstream, 47 | None => return Ok(()), 48 | }; 49 | 50 | let mut command = Command::new("git"); 51 | let child = command 52 | .arg("-C") 53 | .arg(root.join(self.name())) 54 | .arg("remote") 55 | .arg("rm") 56 | .arg("upstream") 57 | .stdout(Stdio::null()) 58 | .stderr(Stdio::null()); 59 | 60 | child.status()?; 61 | 62 | let mut command = Command::new("git"); 63 | let child = command 64 | .arg("-C") 65 | .arg(root.join(self.name())) 66 | .arg("remote") 67 | .arg("add") 68 | .arg("upstream") 69 | .arg(upstream); 70 | 71 | let output = child.output()?; 72 | if !output.status.success() { 73 | let stderr = 74 | std::str::from_utf8(&output.stderr).with_context(|| "Error decoding git output")?; 75 | return Err(anyhow!( 76 | "Failed to set upstream on repo {}: {}", 77 | root.display(), 78 | stderr.trim() 79 | )); 80 | } 81 | Ok(()) 82 | } 83 | 84 | fn run_with_progress( 85 | &self, 86 | command: &mut Command, 87 | progress_bar: &ProgressBar, 88 | ) -> anyhow::Result<()> { 89 | progress_bar.set_message(format!("{}: starting", self.name())); 90 | let mut spawned = command 91 | .stdin(Stdio::null()) 92 | .stdout(Stdio::piped()) 93 | .stderr(Stdio::piped()) 94 | .spawn() 95 | .with_context(|| format!("Error starting command {:?}", command))?; 96 | 97 | let mut last_line = format!("{}: running...", self.name()); 98 | progress_bar.set_message(last_line.clone()); 99 | 100 | if let Some(ref mut stderr) = spawned.stderr { 101 | let lines = BufReader::new(stderr).split(b'\r'); 102 | for line in lines { 103 | let output = line.unwrap(); 104 | if output.is_empty() { 105 | continue; 106 | } 107 | let line = 
std::str::from_utf8(&output).unwrap(); 108 | let plain_line = strip_ansi_codes(line).replace('\n', " "); 109 | let truncated_line = truncate_str(plain_line.trim(), 70, "..."); 110 | progress_bar.set_message(format!("{}: {}", self.name(), truncated_line)); 111 | last_line = plain_line; 112 | } 113 | } 114 | let exit_code = spawned 115 | .wait() 116 | .context("Error waiting for process to finish")?; 117 | if !exit_code.success() { 118 | return Err(anyhow!( 119 | "Git exited with code {}: {}", 120 | exit_code.code().unwrap(), 121 | last_line 122 | )); 123 | } 124 | Ok(()) 125 | } 126 | 127 | pub fn execute_cmd( 128 | &self, 129 | root: &Path, 130 | progress_bar: &ProgressBar, 131 | cmd: &str, 132 | args: &[String], 133 | ) -> anyhow::Result<()> { 134 | let mut command = Command::new(cmd); 135 | let child = command.args(args).current_dir(root.join(self.name())); 136 | 137 | self.run_with_progress(child, progress_bar) 138 | .with_context(|| format!("Error running command in repo {}", self.name()))?; 139 | 140 | Ok(()) 141 | } 142 | 143 | pub fn switch_to_primary_branch(&self, root: &Path) -> anyhow::Result<()> { 144 | let branch = match &self.branch { 145 | None => return Ok(()), 146 | Some(b) => b, 147 | }; 148 | let repo = Git2Repository::init(root.join(self.name()))?; 149 | let status = repo.statuses(Some(&mut StatusOptions::default()))?; 150 | if !status.is_empty() { 151 | return Err(anyhow!( 152 | "Repository is dirty, cannot switch to branch {}", 153 | branch 154 | )); 155 | } 156 | repo.set_head(&format!("refs/heads/{}", branch)) 157 | .with_context(|| format!("Cannot find branch {}", branch))?; 158 | repo.checkout_head(Some(CheckoutBuilder::default().safe().force())) 159 | .with_context(|| format!("Error checking out branch {}", branch))?; 160 | Ok(()) 161 | } 162 | 163 | pub fn clone(&self, root: &Path, progress_bar: &ProgressBar) -> anyhow::Result<()> { 164 | let mut command = Command::new("git"); 165 | 166 | let child = command 167 | .arg("clone") 168 | 
.arg("--recurse-submodules") 169 | .arg("--progress") 170 | .arg(&self.url) 171 | .arg(root.join(self.name())); 172 | 173 | self.run_with_progress(child, progress_bar) 174 | .with_context(|| { 175 | format!("Error cloning repo into {} from {}", self.name(), &self.url) 176 | })?; 177 | 178 | Ok(()) 179 | } 180 | pub fn name(&self) -> &String { 181 | &self.path 182 | } 183 | pub fn get_path(&self, root: &Path) -> anyhow::Result { 184 | let joined = root.join(self.name()); 185 | joined 186 | .canonicalize() 187 | .with_context(|| format!("Cannot resolve {}", joined.display())) 188 | } 189 | pub fn exists(&self, root: &Path) -> bool { 190 | match self.get_path(root) { 191 | Ok(path) => { 192 | let git_dir = root.join(path).join(".git"); 193 | git_dir.exists() && git_dir.is_dir() 194 | } 195 | Err(_) => false, 196 | } 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use std::io; 3 | use std::io::Write; 4 | use std::path::{Path, PathBuf}; 5 | 6 | // From https://docs.rs/clt/latest/src/clt/term.rs.html#277-293 7 | fn build_prompt_text( 8 | text: &str, 9 | suffix: &str, 10 | show_default: bool, 11 | default: Option<&str>, 12 | ) -> String { 13 | let prompt_text = if default.is_some() && show_default { 14 | format!("{} [{}]", text, default.unwrap()) 15 | } else { 16 | text.to_string() 17 | }; 18 | prompt_text + suffix 19 | } 20 | 21 | fn get_prompt_input(prompt_text: &str) -> String { 22 | print!("{}", prompt_text); 23 | io::stdout().flush().unwrap(); 24 | let mut input = String::new(); 25 | io::stdin() 26 | .read_line(&mut input) 27 | .expect("Failed to read line"); 28 | input.trim_end_matches('\n').to_string() 29 | } 30 | 31 | pub fn confirm(text: &str, default: bool, prompt_suffix: &str, show_default: bool) -> bool { 32 | let default_string = match default { 33 | true => Some("Y/n"), 34 | false 
=> Some("y/N"), 35 | }; 36 | let prompt_text = build_prompt_text(text, prompt_suffix, show_default, default_string); 37 | 38 | loop { 39 | let prompt_input = get_prompt_input(&prompt_text).to_ascii_lowercase(); 40 | match prompt_input.trim() { 41 | "y" | "yes" => { 42 | return true; 43 | } 44 | "n" | "no" => { 45 | return false; 46 | } 47 | "" => { 48 | return default; 49 | } 50 | _ => { 51 | println!("Error: invalid input"); 52 | } 53 | } 54 | } 55 | } 56 | 57 | // Convert our workspace path to a PathBuf. We cannot use the value given directly as 58 | // it could contain a tilde, so we run `expanduser` on it _if_ we are on a Unix platform. 59 | // On Windows this isn't supported. 60 | #[cfg(unix)] 61 | pub fn expand_workspace_path(path: &Path) -> anyhow::Result { 62 | expanduser::expanduser(path.to_string_lossy()) 63 | .with_context(|| "Error expanding git workspace path") 64 | } 65 | 66 | #[cfg(not(unix))] 67 | pub fn expand_workspace_path(path: &Path) -> anyhow::Result { 68 | Ok(path.to_path_buf()) 69 | } 70 | 71 | pub fn ensure_workspace_dir_exists(path: &PathBuf) -> anyhow::Result { 72 | if !path.exists() { 73 | fs_extra::dir::create_all(path, false) 74 | .with_context(|| format!("Error creating workspace directory {}", &path.display()))?; 75 | } 76 | path.canonicalize() 77 | .with_context(|| format!("Error canonicalizing workspace path {}", &path.display())) 78 | } 79 | 80 | #[cfg(test)] 81 | mod tests { 82 | use super::*; 83 | 84 | #[test] 85 | fn test_build_prompt_text() { 86 | // Test with default and show_default true 87 | assert_eq!( 88 | build_prompt_text("Continue?", ": ", true, Some("Y/n")), 89 | "Continue? 
[Y/n]: " 90 | ); 91 | 92 | // Test with default but show_default false 93 | assert_eq!( 94 | build_prompt_text("Continue?", ": ", false, Some("Y/n")), 95 | "Continue?: " 96 | ); 97 | 98 | // Test without default 99 | assert_eq!( 100 | build_prompt_text("Continue?", ": ", true, None), 101 | "Continue?: " 102 | ); 103 | 104 | // Test with empty text 105 | assert_eq!(build_prompt_text("", ": ", true, Some("Y/n")), " [Y/n]: "); 106 | } 107 | 108 | #[test] 109 | fn test_expand_workspace_path() { 110 | let path = PathBuf::from("/test/path"); 111 | let result = expand_workspace_path(&path).unwrap(); 112 | assert_eq!(result, path); 113 | 114 | // Test with relative path 115 | let relative_path = PathBuf::from("test/path"); 116 | let result = expand_workspace_path(&relative_path).unwrap(); 117 | assert_eq!(result, relative_path); 118 | } 119 | 120 | #[test] 121 | #[cfg(unix)] 122 | fn test_expand_workspace_path_on_unix_platform() { 123 | let custom_home = "/custom/home"; 124 | std::env::set_var("HOME", custom_home); 125 | 126 | let path = PathBuf::from("~/test/path"); 127 | let result = expand_workspace_path(&path).unwrap(); 128 | let expected_path = PathBuf::from(format!("{}/test/path", custom_home)); 129 | 130 | assert_eq!(result, expected_path); 131 | std::env::remove_var("HOME"); // Clean up 132 | } 133 | 134 | #[test] 135 | fn test_ensure_workspace_exists() { 136 | // Test with temporary directory 137 | let temp_dir = tempfile::tempdir().unwrap(); 138 | let path = temp_dir.path().to_path_buf(); 139 | 140 | // Test existing directory 141 | let result = ensure_workspace_dir_exists(&path).unwrap(); 142 | assert_eq!(result, path.canonicalize().unwrap()); 143 | 144 | // Test non-existing directory 145 | let new_path = path.join("new_dir"); 146 | let result = ensure_workspace_dir_exists(&new_path).unwrap(); 147 | assert!(new_path.exists()); 148 | assert_eq!(result, new_path.canonicalize().unwrap()); 149 | } 150 | } 151 | 
-------------------------------------------------------------------------------- /tests/container/gitea.rs: -------------------------------------------------------------------------------- 1 | use base64::{engine::general_purpose, Engine}; 2 | use git_workspace::providers::APP_USER_AGENT; 3 | use rand::{distributions::Alphanumeric, Rng}; 4 | use reqwest::blocking::{Client, ClientBuilder}; 5 | use reqwest::Certificate; 6 | use serde::Serialize; 7 | use ssh_key::{Algorithm::Ed25519, LineEnding, PrivateKey}; 8 | use std::fs::File; 9 | use std::io::Write; 10 | use std::path::PathBuf; 11 | use std::process::{ExitCode, Termination}; 12 | use tempfile::TempDir; 13 | use testcontainers::core::ExecCommand; 14 | use testcontainers::runners::SyncRunner; 15 | use testcontainers::{Container, ImageExt}; 16 | use testcontainers_modules::gitea::{Gitea, GITEA_HTTP_PORT, GITEA_SSH_PORT}; 17 | 18 | pub struct GiteaContainer { 19 | pub gitea: Container, 20 | pub url: String, 21 | pub username: String, 22 | pub password: String, 23 | pub private_key: String, 24 | token: String, 25 | tls_cert: String, 26 | http_client: Client, 27 | } 28 | 29 | const WORKSPACE_TEMPLATE: &str = r#"[[provider]] 30 | provider = "gitea" 31 | name = "ORG" 32 | url = "https://localhost" 33 | path = "." 
34 | env_var = "GITEA_TOKEN" 35 | skip_forks = false 36 | auth_http = true 37 | include = [] 38 | exclude = []"#; 39 | 40 | const GIT_CONFIG_TEMPLATE: &str = r#"[credential] 41 | username = "42" 42 | helper = "!f() { echo 'password=PASSWORD'; }; f" 43 | "#; 44 | 45 | #[derive(Serialize)] 46 | pub struct GiteaCommit { 47 | branch: String, 48 | content: String, 49 | message: String, 50 | } 51 | 52 | impl GiteaCommit { 53 | pub fn new(branch: &str, message: &str, content: &str) -> Self { 54 | let content_base64 = general_purpose::STANDARD.encode(content); 55 | Self { 56 | branch: branch.to_string(), 57 | message: message.to_string(), 58 | content: content_base64, 59 | } 60 | } 61 | } 62 | 63 | /// Represents a containerized Gitea instance for testing purposes 64 | /// with pre-configured settings including authentication, TLS, 65 | /// and API access for testing scenarios. 66 | /// 67 | /// See Gitea API documentation: 68 | /// - https://gitea.com/api/swagger#/ 69 | impl GiteaContainer { 70 | fn generate_test_ssh_key() -> (String, String) { 71 | let private_key = PrivateKey::random(&mut rand::thread_rng(), Ed25519) 72 | .unwrap_or_else(|_| panic!("Failed to generate key")); 73 | let public_key = private_key.public_key(); 74 | 75 | // Convert to OpenSSH format strings 76 | let private_key_str = private_key 77 | .to_openssh(LineEnding::LF) 78 | .unwrap_or_else(|_| panic!("Failed to serialize private key")); 79 | let public_key_str = public_key 80 | .to_openssh() 81 | .unwrap_or_else(|_| panic!("Failed to serialize public key")); 82 | 83 | (private_key_str.to_string(), public_key_str.to_string()) 84 | } 85 | 86 | /// Starts a new Gitea container instance configured for testing 87 | /// 88 | /// This method: 89 | /// 1. Generates SSH keys for authentication 90 | /// 2. Creates a Gitea container with: 91 | /// - An admin account (user: "42", password: "42") 92 | /// - TLS enabled 93 | /// - Mapped ports for HTTPS (443) and SSH (22) 94 | /// 3. 
Generates an access token with read/write permissions 95 | /// 96 | /// Returns a configured GiteaContainer instance ready for testing 97 | pub fn start() -> Self { 98 | let (private_key, public_key) = Self::generate_test_ssh_key(); 99 | let (username, password) = ("42".to_string(), "42".to_string()); 100 | let gitea = Gitea::default() 101 | .with_admin_account(&username, &password, Some(public_key)) 102 | .with_tls(true) 103 | .with_mapped_port(443, GITEA_HTTP_PORT) 104 | .with_mapped_port(22, GITEA_SSH_PORT) 105 | .start() 106 | .unwrap_or_else(|_| panic!("to start the container")); 107 | let url = "https://localhost".to_string(); 108 | 109 | // Generate token 110 | let command = ExecCommand::new(vec![ 111 | "/usr/local/bin/gitea", 112 | "admin", 113 | "user", 114 | "generate-access-token", 115 | "--username", 116 | &username, 117 | "--scopes", 118 | "write:organization,write:user,write:repository", 119 | ]); 120 | 121 | // Generate access token 122 | let mut token = String::new(); 123 | gitea 124 | .exec(command) 125 | .unwrap_or_else(|_| panic!("to generate access token")) 126 | .stdout() 127 | .read_to_string(&mut token) 128 | .unwrap(); 129 | let token = token 130 | .split(":") 131 | .nth(1) 132 | .unwrap_or_else(|| panic!("to parse token from output")) 133 | .trim() 134 | .to_string(); 135 | 136 | // Initialize HTTP client for Gitea API requests 137 | let tls_cert = gitea.image().tls_ca().unwrap().to_string(); 138 | let cert = Certificate::from_pem(tls_cert.as_bytes()).unwrap(); 139 | let http_client = ClientBuilder::new() 140 | .https_only(true) 141 | .user_agent(APP_USER_AGENT) 142 | .add_root_certificate(cert) 143 | .use_rustls_tls() 144 | .build() 145 | .unwrap(); 146 | 147 | Self { 148 | gitea, 149 | url, 150 | username, 151 | password, 152 | private_key, 153 | token, 154 | tls_cert, 155 | http_client, 156 | } 157 | } 158 | 159 | /// Creates a temporary file in the workspace directory 160 | /// 161 | /// The file will be automatically removed when 
calling the reset function 162 | pub fn create_tmp_file(&self, tmp_dir: &TempDir, filepath: &str, content: &str) -> PathBuf { 163 | let config_path = tmp_dir.path().join(filepath); 164 | if let Some(parent) = config_path.parent() { 165 | std::fs::create_dir_all(parent).unwrap(); 166 | } 167 | let mut file = File::create(&config_path).unwrap(); 168 | file.write_all(content.as_bytes()).unwrap(); 169 | config_path 170 | } 171 | 172 | /// Saves the CA certificate to the temporary directory 173 | fn save_ca_certificate(&self, tmp_dir: &TempDir) -> PathBuf { 174 | self.create_tmp_file(tmp_dir, "bundle.pem", self.tls_cert.as_str()) 175 | } 176 | 177 | /// Creates a Git configuration file with test credentials 178 | fn save_git_config(&self, tmp_dir: &TempDir) { 179 | let file_content = GIT_CONFIG_TEMPLATE 180 | .to_string() 181 | .replace("PASSWORD", &self.token); 182 | self.create_tmp_file(tmp_dir, "git/config", &file_content); 183 | } 184 | 185 | fn create_organization(&self, org: &str) { 186 | #[derive(Serialize)] 187 | struct CreateOrg { 188 | username: String, 189 | } 190 | 191 | let url = format!("{}/api/v1/orgs", self.url); 192 | self.http_client 193 | .post(&url) 194 | .bearer_auth(&self.token) 195 | .json(&CreateOrg { 196 | username: org.to_string(), 197 | }) 198 | .send() 199 | .unwrap_or_else(|_| panic!("expect to add org {}", org)) 200 | .error_for_status() 201 | .unwrap_or_else(|_| panic!("expect 2xx http response for creating {} org", org)); 202 | } 203 | 204 | /// Sets up the test environment for Gitea integration tests 205 | /// 206 | /// This method: 207 | /// 1. Creates a temporary workspace directory for test files 208 | /// 2. Creates a random organization in Gitea 209 | /// 3. Creates workspace.toml config 210 | /// 4. Sets up SSL certificate 211 | /// 5. Configures API authentication by setting GITEA_TOKEN environment variable 212 | /// 6. 
Sets up Git authentication with isolated config (no user/system settings) 213 | /// 214 | /// See Git documentation for details on isolated config: 215 | /// - https://git-scm.com/book/ms/v2/Git-Internals-Environment-Variables 216 | /// - https://git-scm.com/docs/git-config#ENVIRONMENT) 217 | /// 218 | /// Returns a tuple containing: 219 | /// - TempDir: The temporary workspace directory 220 | /// - String: The name of the created organization 221 | pub fn setup(&self) -> (TempDir, String) { 222 | let tmp_dir = TempDir::new().unwrap(); 223 | 224 | let org_name: String = rand::thread_rng() 225 | .sample_iter(Alphanumeric) 226 | .take(8) // Adjust length as needed 227 | .map(char::from) 228 | .collect(); 229 | self.create_organization(&org_name); 230 | 231 | let config_content = WORKSPACE_TEMPLATE.replace("ORG", &org_name); 232 | self.create_tmp_file(&tmp_dir, "workspace.toml", &config_content); 233 | 234 | let cert_path = self.save_ca_certificate(&tmp_dir); 235 | std::env::set_var("SSL_CERT_FILE", cert_path); 236 | 237 | std::env::set_var("GITEA_TOKEN", &self.token); 238 | 239 | self.save_git_config(&tmp_dir); 240 | std::env::set_var("XDG_CONFIG_HOME", tmp_dir.path()); 241 | std::env::set_var("GIT_SSL_NO_VERIFY", "true"); 242 | std::env::set_var("GIT_CONFIG_NOSYSTEM", "true"); 243 | 244 | println!( 245 | "\nCreate org {} and tmp workspace directory {}", 246 | &org_name, 247 | tmp_dir.path().display(), 248 | ); 249 | 250 | (tmp_dir, org_name) 251 | } 252 | 253 | /// Creates multiple repositories on the Gitea organization 254 | pub fn add_repos(&self, org_name: &str, repos: R) 255 | where 256 | T: AsRef, 257 | R: IntoIterator, 258 | { 259 | #[derive(Serialize)] 260 | struct CreateRepo { 261 | name: String, 262 | } 263 | 264 | let url = format!("{}/api/v1/orgs/{}/repos", self.url, org_name); 265 | for repo in repos { 266 | self.http_client 267 | .post(&url) 268 | .bearer_auth(&self.token) 269 | .json(&CreateRepo { 270 | name: repo.as_ref().to_string(), 271 | }) 272 | 
.send() 273 | .unwrap_or_else(|_| panic!("expect to add repo {}", repo.as_ref())) 274 | .error_for_status() 275 | .unwrap_or_else(|_| { 276 | panic!( 277 | "expect 2xx http response for creating {} repo", 278 | repo.as_ref() 279 | ) 280 | }); 281 | } 282 | } 283 | 284 | /// Deletes multiple repositories from the Gitea organization 285 | pub fn delete_repos(&self, org_name: &str, repos: R) 286 | where 287 | T: AsRef, 288 | R: IntoIterator, 289 | { 290 | for repo in repos { 291 | let url = format!("{}/api/v1/repos/{}/{}", self.url, org_name, repo.as_ref()); 292 | self.http_client 293 | .delete(&url) 294 | .bearer_auth(&self.token) 295 | .send() 296 | .unwrap_or_else(|_| panic!("expect to delete repo {}", repo.as_ref())) 297 | .error_for_status() 298 | .unwrap_or_else(|_| { 299 | panic!( 300 | "expect 2xx http response for deleting {} repo", 301 | repo.as_ref() 302 | ) 303 | }); 304 | } 305 | } 306 | 307 | /// Creates a new commit in the specified repository with the given file contents 308 | pub fn commit_to_repo(&self, org_name: &str, repo: &str, filepath: &str, body: &GiteaCommit) { 309 | let url = format!( 310 | "{}/api/v1/repos/{}/{}/contents/{}", 311 | self.url, org_name, repo, filepath 312 | ); 313 | self.http_client 314 | .post(&url) 315 | .bearer_auth(&self.token) 316 | .json(body) 317 | .send() 318 | .unwrap_or_else(|_| panic!("expect to create new commit for repo {}", repo)) 319 | .error_for_status() 320 | .unwrap_or_else(|_| { 321 | panic!( 322 | "expect 2xx http response when creating new commit on {} repo", 323 | repo 324 | ) 325 | }); 326 | } 327 | 328 | /// Resets the test environment by removing the temporary folder on the system. 
329 | /// 330 | /// Notes: 331 | /// - organization and repositories created during tests are not removed 332 | /// as they use unique names 333 | /// - environment variables are not cleared 334 | pub fn reset(&self, tmp_dir: TempDir) { 335 | tmp_dir.close().unwrap(); 336 | } 337 | } 338 | 339 | impl Termination for &'static GiteaContainer { 340 | fn report(self) -> ExitCode { 341 | ExitCode::SUCCESS 342 | } 343 | } 344 | -------------------------------------------------------------------------------- /tests/container/mod.rs: -------------------------------------------------------------------------------- 1 | mod gitea; 2 | 3 | pub use gitea::{GiteaCommit, GiteaContainer}; 4 | -------------------------------------------------------------------------------- /tests/gitea_tests.rs: -------------------------------------------------------------------------------- 1 | mod container; 2 | 3 | use container::{GiteaCommit, GiteaContainer}; 4 | use git_workspace::commands::{archive, execute_cmd, fetch, lock, update}; 5 | use rstest::*; 6 | use std::{ 7 | fs::{read_to_string, remove_dir_all}, 8 | path::Path, 9 | sync::OnceLock, 10 | }; 11 | 12 | // Container for gitea test instance, initialized once across all tests 13 | static GITEA_CONTAINER: OnceLock = OnceLock::new(); 14 | 15 | // Fixture to get or start container if not started 16 | #[fixture] 17 | pub fn gitea_container() -> &'static GiteaContainer { 18 | GITEA_CONTAINER.get_or_init(GiteaContainer::start) 19 | } 20 | 21 | // Remove gitea container at end of tests 22 | // Comment out this function for debugging to keep container running after tests 23 | #[ctor::dtor] 24 | fn cleanup() { 25 | let container = &GITEA_CONTAINER as *const _ as *mut OnceLock; 26 | // Safety: We have exclusive access to GITEA_CONTAINER during cleanup/destruction 27 | if let Some(container) = unsafe { (*container).take() } { 28 | println!("Remove Gitea container...\n"); 29 | drop(container); 30 | } 31 | } 32 | 33 | fn 
update_command(workspace_path: &Path) { 34 | lock(workspace_path).unwrap(); 35 | update(workspace_path, 8).unwrap(); 36 | } 37 | 38 | fn execute_command(workspace_path: &Path, cmd: &str, args_raw: &str) { 39 | let args: Vec = args_raw.split(" ").map(String::from).collect(); 40 | execute_cmd(workspace_path, 8, cmd.to_string(), args).unwrap(); 41 | } 42 | 43 | #[rstest] 44 | fn test_update_command(gitea_container: &GiteaContainer) { 45 | // Setup environment 46 | let (tmp_dir, org_name) = gitea_container.setup(); 47 | let workspace = tmp_dir.path(); 48 | 49 | // Test update command 50 | gitea_container.add_repos(&org_name, ["repo1", "repo2"]); 51 | update_command(workspace); 52 | 53 | // Check if repo1/2 exists 54 | let repo1 = format!("{}/repo1/.git/config", org_name); 55 | let repo2 = format!("{}/repo2/.git/config", org_name); 56 | assert!(workspace.join(&repo1).exists(), "{} does not exist", &repo1); 57 | assert!(workspace.join(&repo2).exists(), "{} does not exist", &repo2); 58 | 59 | // Test with new repo add on Gitea server 60 | gitea_container.add_repos(&org_name, ["repo3"]); 61 | update_command(workspace); 62 | 63 | // Check if repo3 exists 64 | let repo3 = format!("{}/repo3/.git/config", org_name); 65 | assert!(workspace.join(&repo3).exists(), "{} does not exist", &repo3); 66 | 67 | // Test with removed local repo2 68 | let repo2_path = workspace.join(format!("{}/repo2", org_name)); 69 | remove_dir_all(&repo2_path).unwrap(); 70 | assert!(!repo2_path.exists(),); 71 | update_command(workspace); 72 | 73 | // Check if repo2 still exists 74 | assert!(workspace.join(&repo2).exists(), "{} does not exist", &repo2); 75 | 76 | gitea_container.reset(tmp_dir); 77 | } 78 | 79 | #[rstest] 80 | fn test_archive_command(gitea_container: &GiteaContainer) { 81 | // Setup environment 82 | let (tmp_dir, org_name) = gitea_container.setup(); 83 | let workspace = tmp_dir.path(); 84 | gitea_container.add_repos(&org_name, ["repo1", "repo2", "repo3"]); 85 | update_command(workspace); 
86 | 87 | // Test archive command 88 | gitea_container.delete_repos(&org_name, ["repo2"]); 89 | archive(workspace, true).unwrap(); 90 | 91 | // Check if .git/config exists for repo2 is in the .archive directory 92 | let repo2 = workspace.join(format!(".archive/{}/repo2/.git/config", org_name)); 93 | assert!(repo2.exists(), "{} does not exist", repo2.display()); 94 | 95 | gitea_container.reset(tmp_dir); 96 | } 97 | 98 | #[rstest] 99 | fn test_fetch_and_run_commands(gitea_container: &GiteaContainer) { 100 | // Setup environment 101 | let (tmp_dir, org_name) = gitea_container.setup(); 102 | let workspace = tmp_dir.path(); 103 | gitea_container.add_repos(&org_name, ["repo1", "repo2"]); 104 | update_command(workspace); 105 | 106 | // Test fetch and run commands 107 | let content = "Hello Orf".to_string(); 108 | let commit = GiteaCommit::new("main", "chore: initial commit", "Hello Orf"); 109 | gitea_container.commit_to_repo(&org_name, "repo1", "README.md", &commit); 110 | fetch(workspace, 8).unwrap(); 111 | execute_command(workspace, "git", "merge origin/main"); 112 | 113 | let org_dir = workspace.join(&org_name).join("repo1"); 114 | println!("Files in {}:", org_dir.display()); 115 | for entry in std::fs::read_dir(org_dir).unwrap() { 116 | let entry = entry.unwrap(); 117 | println!("{}", entry.path().display()); 118 | } 119 | 120 | // Check that README.md file is present on main branch of repo1 121 | let repo_path = workspace.join(format!("{}/{}/.git/HEAD", org_name, "repo1")); 122 | let readme_path = workspace.join(format!("{}/{}/README.md", org_name, "repo1")); 123 | let branch = read_to_string(&repo_path).unwrap(); 124 | let readme = read_to_string(&readme_path).unwrap(); 125 | assert_eq!(branch.trim(), "ref: refs/heads/main"); 126 | assert_eq!(readme, content); 127 | } 128 | --------------------------------------------------------------------------------