├── .github ├── FUNDING.yml └── workflows │ ├── gh-pages.yml │ ├── nix.yml │ └── release.yml ├── .gitignore ├── CONTRIBUTING.md ├── CONTRIBUTORS ├── Cargo.lock ├── Cargo.toml ├── Justfile ├── LICENSE ├── README.md ├── build.rs ├── dist-workspace.toml ├── docs ├── .gitignore ├── book.toml └── src │ ├── SUMMARY.md │ ├── dependency_updates.md │ ├── developing.md │ ├── documentation.md │ ├── faq.md │ ├── forge_integration.md │ ├── formatting_and_style.md │ ├── installation.md │ ├── local_configuration.md │ ├── overview.md │ ├── releases.md │ ├── repos.md │ ├── testing.md │ ├── tutorial.md │ ├── worktree_behavior.md │ ├── worktree_remotes.md │ ├── worktree_working.md │ └── worktrees.md ├── e2e_tests ├── .gitignore ├── conftest.py ├── docker-compose.yml ├── docker-rest │ ├── Dockerfile │ └── flask │ │ ├── app.py │ │ ├── github.py │ │ ├── github_api_page_1.json.j2 │ │ ├── github_api_page_2.json.j2 │ │ ├── github_api_page_3.json.j2 │ │ ├── github_api_page_4.json.j2 │ │ ├── github_api_user.json │ │ ├── gitlab.py │ │ ├── gitlab_api_page_1.json │ │ ├── gitlab_api_page_2.json │ │ ├── gitlab_api_page_3.json │ │ ├── gitlab_api_page_4.json │ │ └── gitlab_api_user.json ├── docker │ └── Dockerfile ├── helpers.py ├── test_basic.py ├── test_repos_find.py ├── test_repos_find_remote.py ├── test_repos_status.py ├── test_repos_sync.py ├── test_worktree_clean.py ├── test_worktree_config_presistent_branch.py ├── test_worktree_conversion.py ├── test_worktree_fetch.py ├── test_worktree_rebase.py ├── test_worktree_status.py └── test_worktrees.py ├── example.config.toml ├── example.config.yaml ├── flake.lock ├── flake.nix ├── pkg └── arch │ ├── .SRCINFO │ ├── .gitignore │ └── PKGBUILD ├── release.sh ├── rust-toolchain.toml ├── src ├── auth.rs ├── config.rs ├── grm │ ├── cmd.rs │ └── main.rs ├── lib.rs ├── output.rs ├── path.rs ├── provider │ ├── github.rs │ ├── gitlab.rs │ └── mod.rs ├── repo.rs ├── table.rs ├── tree.rs └── worktree.rs ├── tests ├── helpers.rs └── repo.rs ├── update-cargo-dependencies.py └── update-pkgbuild.sh /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: hakoerber 2 | -------------------------------------------------------------------------------- /.github/workflows/gh-pages.yml: -------------------------------------------------------------------------------- 1 | name: github pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | deploy: 11 | runs-on: ubuntu-20.04 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.ref }} 14 | steps: 15 | - uses: actions/checkout@v2 16 | 17 | - name: Setup mdBook 18 | uses: peaceiris/actions-mdbook@v1 19 | with: 20 | mdbook-version: 'latest' 21 | 22 | - run: cd ./docs && mdbook build 23 | 24 | - name: Deploy 25 | uses: peaceiris/actions-gh-pages@v3 26 | if: ${{ github.ref == 'refs/heads/master' }} 27 | with: 28 | github_token: ${{ secrets.GITHUB_TOKEN }} 29 | publish_dir: ./docs/book 30 | -------------------------------------------------------------------------------- /.github/workflows/nix.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Nix Flake Check 3 | 4 | on: # yamllint disable-line rule:truthy 5 | pull_request: 6 | branches: 7 | - master 8 | push: 9 | branches: 10 | - master 11 | - develop 12 | 13 | jobs: 14 | nix-flake-check: 15 | name: Run Nix Flake Checks 16 | runs-on: ubuntu-latest 17 | environment: actions_build_environment 18 | steps: 19 | - uses: actions/checkout@main 20 | - uses: 
cachix/install-nix-action@master 21 | - run: nix --accept-flake-config flake check 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | result 3 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Check out [the developer 2 | documentation](https://hakoerber.github.io/git-repo-manager/developing.html) it 3 | you want to contribute! 4 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | nonnominandus 2 | Maximilian Volk 3 | Baptiste (@BapRx) 4 | Sirio Balmelli 5 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "git-repo-manager" 3 | version = "0.7.22" 4 | edition = "2021" 5 | 6 | authors = [ 7 | "Hannes Körber ", 8 | ] 9 | description = """ 10 | Manage multiple git repositories. 11 | You configure the git repositories in a file, the program does the rest! 12 | """ 13 | 14 | keywords = ["git"] 15 | categories = [ 16 | "command-line-utilities", 17 | "development-tools", 18 | ] 19 | 20 | homepage = "https://github.com/hakoerber/git-repo-manager" 21 | repository = "https://github.com/hakoerber/git-repo-manager" 22 | 23 | readme = "README.md" 24 | 25 | rust-version = "1.74" 26 | 27 | license = "GPL-3.0-only" 28 | 29 | [profile.e2e-tests] 30 | inherits = "dev" 31 | 32 | # The profile that 'dist' will build with 33 | [profile.dist] 34 | inherits = "release" 35 | lto = "thin" 36 | 37 | [lib] 38 | name = "grm" 39 | path = "src/lib.rs" 40 | 41 | [[bin]] 42 | name = "grm" 43 | path = "src/grm/main.rs" 44 | 45 | [dependencies.toml] 46 | version = "=0.8.19" 47 | 48 | [dependencies.serde] 49 | version = "=1.0.215" 50 | features = ["derive"] 51 | 52 | [dependencies.git2] 53 | version = "=0.19.0" 54 | 55 | [dependencies.shellexpand] 56 | version = "=3.1.0" 57 | 58 | [dependencies.clap] 59 | version = "=4.5.21" 60 | features = ["derive", "cargo"] 61 | 62 | [dependencies.console] 63 | version = "=0.15.8" 64 | 65 | [dependencies.regex] 66 | version = "=1.11.1" 67 | 68 | [dependencies.comfy-table] 69 | version = "=7.1.3" 70 | 71 | [dependencies.serde_yaml] 72 | version = "=0.9.34" 73 | 74 | [dependencies.serde_json] 75 | version = "=1.0.133" 76 | 77 | [dependencies.ureq] 78 | version = "=2.11.0" 79 | features = ["json"] 80 | 81 | [dependencies.parse_link_header] 82 | version = "=0.4.0" 83 | 84 | [dependencies.url-escape] 85 | version = "=0.1.1" 86 | 87 | [dev-dependencies.outdir-tempdir] 88 | version = "=0.2.0" 89 | 90 | [features] 91 | static-build = [ 92 | "git2/vendored-openssl", 93 | "git2/vendored-libgit2", 94 | ] 95 | -------------------------------------------------------------------------------- /Justfile: -------------------------------------------------------------------------------- 1 | set positional-arguments 2 | 3 | set shell := ["/bin/bash", "-c"] 4 | 5 | static_target := "x86_64-unknown-linux-musl" 6 | 7 | cargo := "cargo" 8 | 9 | check: fmt-check lint test 10 | {{cargo}} check 11 | 12 | clean: 13 | {{cargo}} clean 14 | git clean -f -d -X 15 | 16 | fmt: 17 | {{cargo}} fmt 18 | git ls-files | grep '\.py$' | xargs isort 19 | git ls-files | grep 
'\.py$' | xargs black 20 | git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --write 21 | 22 | fmt-check: 23 | {{cargo}} fmt --check 24 | git ls-files | grep '\.py$' | xargs black --check 25 | git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --diff 26 | 27 | lint: 28 | {{cargo}} clippy --no-deps -- -Dwarnings 29 | git ls-files | grep '\.py$' | xargs ruff check --ignore E501 30 | git ls-files | grep '\.sh$' | xargs -L 1 shellcheck --norc 31 | 32 | lint-fix: 33 | {{cargo}} clippy --no-deps --fix 34 | 35 | build-release: 36 | {{cargo}} build --release 37 | 38 | build-release-static: 39 | {{cargo}} build --release --target {{static_target}} --features=static-build 40 | 41 | pushall: 42 | for r in $(git remote) ; do \ 43 | for branch in develop master ; do \ 44 | git push $r $branch ; \ 45 | done ; \ 46 | done 47 | 48 | release-patch: 49 | ./release.sh patch 50 | 51 | test-binary: 52 | env \ 53 | GITHUB_API_BASEURL=http://rest:5000/github \ 54 | GITLAB_API_BASEURL=http://rest:5000/gitlab \ 55 | {{cargo}} build --profile e2e-tests --target {{static_target}} --features=static-build 56 | 57 | install: 58 | {{cargo}} install --path . 59 | 60 | install-static: 61 | {{cargo}} install --target {{static_target}} --features=static-build --path . 62 | 63 | build: 64 | {{cargo}} build 65 | 66 | build-static: 67 | {{cargo}} build --target {{static_target}} --features=static-build 68 | 69 | test: test-unit test-integration test-e2e 70 | 71 | test-unit +tests="": 72 | {{cargo}} test --lib --bins -- --show-output {{tests}} 73 | 74 | test-integration: 75 | {{cargo}} test --test "*" 76 | 77 | test-e2e +tests=".": test-binary 78 | cd ./e2e_tests \ 79 | && docker compose rm --stop -f \ 80 | && docker compose build \ 81 | && docker compose run \ 82 | --rm \ 83 | -v $PWD/../target/x86_64-unknown-linux-musl/e2e-tests/grm:/grm \ 84 | pytest \ 85 | "GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest --exitfirst -p no:cacheprovider --color=yes "$@"" \ 86 | && docker compose rm --stop -f 87 | 88 | update-dependencies: update-cargo-dependencies 89 | 90 | update-cargo-dependencies: 91 | ./update-cargo-dependencies.py 92 | 93 | wait: 94 | read -p "[ENTER] to continue " 95 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GRM — Git Repository Manager ![Nix Flake Check](https://github.com/hakoerber/git-repo-manager/workflows/Nix%20Flake%20Check/badge.svg) 2 | 3 | GRM helps you manage git repositories in a declarative way. Configure your 4 | repositories in a [TOML](https://toml.io/) or YAML file, GRM does the rest. 5 | 6 | Also, GRM can be used to work with git worktrees in an opinionated, 7 | straightforward fashion. 8 | 9 | **Take a look at the [official documentation](https://hakoerber.github.io/git-repo-manager/) 10 | for installation & quickstart.** 11 | 12 | # Why? 13 | 14 | I have a **lot** of repositories on my machines. My own stuff, forks, quick 15 | clones of other's repositories, projects that never went anywhere ... In short, 16 | I lost overview. 17 | 18 | To sync these repositories between machines, I've been using Nextcloud. The thing 19 | is, Nextcloud is not too happy about too many small files that change all the time, 20 | like the files inside `.git`. Git also assumes that those files are updated as 21 | atomically as possible. Nextcloud cannot guarantee that, so when I do a `git status` 22 | during a sync, something blows up. 
And resolving these conflicts is just no fun ... 23 | 24 | In the end, I think that git repos just don't belong in something like Nextcloud. 25 | Git is already managing the content & versions, so there is no point in having 26 | another tool do the same. But of course, setting up all those repositories from 27 | scratch on a new machine is too much hassle. What if there was a way to clone all 28 | those repos in a single command? 29 | 30 | Also, I once transferred the domain of my personal git server. I updated a few 31 | remotes manually, but I still stumble upon old, stale remotes in projects that 32 | I haven't touched in a while. What if there was a way to update all those remotes 33 | in one place? 34 | 35 | This is how GRM came to be. I'm a fan of infrastructure-as-code, and GRM is a bit 36 | like Terraform for your local git repositories. Write a config, run the tool, and 37 | your repos are ready. The only thing that is tracked by git is the list of 38 | repositories itself. 39 | 40 | # Crates 41 | 42 | * [`toml`](https://docs.rs/toml/) for the configuration file. 43 | * [`serde`](https://docs.rs/serde/), together with 44 | [`serde_yaml`](https://docs.rs/serde_yaml/) and 45 | [`serde_json`](https://docs.rs/serde_json/). Because we're using Rust, after 46 | all. 47 | * [`git2`](https://docs.rs/git2/), a safe wrapper around `libgit2`, for all git operations. 48 | * [`clap`](https://docs.rs/clap/), [`console`](https://docs.rs/console/), [`comfy_table`](https://docs.rs/comfy-table/) and [`shellexpand`](https://docs.rs/shellexpand) for good UX. 49 | * [`ureq`](https://docs.rs/ureq/) as the HTTP client for forge integrations. 50 | 51 | # Links 52 | 53 | * [crates.io](https://crates.io/crates/git-repo-manager) 54 | 55 | # Mirrors 56 | 57 | This repository can be found on multiple forges: 58 | 59 | * https://github.com/hakoerber/git-repo-manager 60 | * https://code.hkoerber.de/hannes/git-repo-manager/ 61 | * https://codeberg.org/hakoerber/git-repo-manager 62 | * https://git.sr.ht/~hkoerber/git-repo-manager 63 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | if let Ok(v) = std::env::var("GRM_RELEASE_VERSION") { 3 | println!("cargo:rustc-env=CARGO_PKG_VERSION={v}"); 4 | } 5 | println!("cargo:rerun-if-env-changed=GRM_RELEASE_VERSION"); 6 | } 7 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 | [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.25.1" 8 | # CI backends to support 9 | ci = "github" 10 | # The installers to generate for each app 11 | installers = [] 12 | # Target platforms to build apps for (Rust target-triple syntax) 13 | targets = ["x86_64-unknown-linux-musl"] 14 | features = ["static-build"] 15 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | book 2 | -------------------------------------------------------------------------------- /docs/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | authors = ["Hannes Körber"] 3 | language = "en" 4 | multilingual = false 5 | src = "src" 6 | 
title = "Git Repo Manager" 7 | 8 | [output.html] 9 | mathjax-support = true 10 | 11 | # [output.linkcheck] 12 | # follow-web-links = true 13 | # traverse-parent-directories = false 14 | # warning-policy = "error" 15 | -------------------------------------------------------------------------------- /docs/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | [Overview](./overview.md) 4 | 5 | - [Installation](./installation.md) 6 | - [Tutorial](./tutorial.md) 7 | - [Managing Repositories](./repos.md) 8 | - [Local Configuration](./local_configuration.md) 9 | - [Forge Integrations](./forge_integration.md) 10 | - [Git Worktrees](./worktrees.md) 11 | - [Working with Worktrees](./worktree_working.md) 12 | - [Worktrees and Remotes](./worktree_remotes.md) 13 | - [Behavior Details](./worktree_behavior.md) 14 | - [FAQ](./faq.md) 15 | - [Developer Documentation](./developing.md) 16 | - [Testing](./testing.md) 17 | - [Dependency updates](./dependency_updates.md) 18 | - [Releases](./releases.md) 19 | - [Formatting & Style](./formatting_and_style.md) 20 | - [The Docs Themselves](./documentation.md) 21 | -------------------------------------------------------------------------------- /docs/src/dependency_updates.md: -------------------------------------------------------------------------------- 1 | # Dependency updates 2 | 3 | Rust has the same problem as the node ecosystem, just a few magnitudes smaller: 4 | Dependency sprawl. GRM has a dozen direct dependencies, but over 150 transitive 5 | ones. 6 | 7 | To keep them up to date, there is a script: 8 | `update-cargo-dependencies.py`. It updates direct dependencies to the 9 | latest stable version and updates transitive dependencies where possible. To run 10 | it, use `just update-dependencies`, which will create commits for each update. 11 | -------------------------------------------------------------------------------- /docs/src/developing.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | GRM is still in very early development. I started GRM mainly to scratch my own 4 | itches (and am heavily dogfooding it). If you have a new use case for GRM, go 5 | for it! 6 | 7 | ## Contributing 8 | 9 | To contribute, just fork the repo and create a pull request against `develop`. 10 | If you plan bigger changes, please consider opening an issue first, so we can 11 | discuss it. 12 | 13 | If you want, add yourself to the `CONTRIBUTORS` file in your pull request. 14 | 15 | ## Branching strategy 16 | 17 | The branching strategy is a simplified 18 | [git-flow](https://nvie.com/posts/a-successful-git-branching-model/). 19 | 20 | * `master` is the "production" branch. Each commit is a new release. 21 | * `develop` is the branch where new stuff is coming in. 22 | * feature branches branch off of `develop` and merge back into it. 23 | 24 | Feature branches are not required, there are also changes happening directly on 25 | `develop`. 26 | 27 | ## Required tooling 28 | 29 | You will need the following tools: 30 | 31 | * Rust (obviously) (easiest via `rustup`) 32 | * Python3 33 | * [`just`](https://github.com/casey/just), a command runner like `make`. See 34 | [here](https://github.com/casey/just#installation) for installation 35 | instructions (it's most likely just a simple `cargo install just`). 36 | * Docker & docker-compose for the e2e tests 37 | * `isort`, `black` and `shfmt` for formatting. 38 | * `ruff` and `shellcheck` for linting. 
39 | * `python-tomlkit` for the dependency update script. 40 | * `mdbook` for the documentation 41 | 42 | Here are the tools: 43 | 44 | | Distribution | Command | 45 | | ------------- | ------------------------------------------------------------------------------------------------------------------ | 46 | | Arch Linux | `pacman -S --needed python3 rustup just docker docker-compose python-black shfmt shellcheck mdbook python-tomlkit` | 47 | | Ubuntu/Debian | `apt-get install --no-install-recommends python3 docker.io docker-compose black shellcheck python3-tomlkit` | 48 | 49 | Note that you will have to install `just` and `mdbook` manually on Ubuntu (e.g. 50 | via `cargo install just mdbook` if your rust build environment is set up 51 | correctly). Same for `shfmt`, which may just be a `go install 52 | mvdan.cc/sh/v3/cmd/shfmt@latest`, depending on your go build environment. 53 | 54 | For details about rustup and the toolchains, see [the installation 55 | section](./installation.md). 56 | 57 | ## Development Environment with [Nix](https://nixos.org) 58 | 59 | Enter a development shell with all tools and dependencies: 60 | 61 | ```bash 62 | $ nix develop 63 | ``` 64 | 65 | From within the nix shell: 66 | 67 | ```bash 68 | $ just [TARGET] 69 | ``` 70 | 71 | or 72 | 73 | ```bash 74 | $ cargo build 75 | ``` 76 | 77 | Update toolchain and dependencies: 78 | 79 | ```bash 80 | $ nix flake update 81 | ``` 82 | 83 | Build: 84 | 85 | ```bash 86 | $ nix build 87 | ``` 88 | 89 | Run: 90 | 91 | ```bash 92 | $ nix run . -- [ARGUMENTS] 93 | ``` 94 | 95 | Find more documentation about Nix Flakes here: https://nixos.wiki/wiki/Flakes 96 | 97 | ### Caveats 98 | 99 | The current Nix environment does not source: 100 | 101 | - aarch64-unknown-linux-musl 102 | - x86_64-unknown-linux-musl 103 | - docker and related tools 104 | 105 | If interest develops this can be added. 106 | 107 | ### Developing Nix 108 | 109 | The crate is built using [Crane](https://github.com/ipetkov/crane). 110 | 111 | Format code with [alejandra](https://github.com/kamadorueda/alejandra). 112 | -------------------------------------------------------------------------------- /docs/src/documentation.md: -------------------------------------------------------------------------------- 1 | # Documentation 2 | 3 | The documentation lives in the `docs` folder and uses 4 | [mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing 5 | features here! 6 | 7 | Using [GitHub actions](https://github.com/features/actions), the documentation 8 | on `master` is automatically published to [the project 9 | homepage](https://hakoerber.github.io/git-repo-manager/) via GitHub pages. See 10 | `.github/workflows/gh-pages.yml` for the configuration of GitHub Actions. 11 | 12 | -------------------------------------------------------------------------------- /docs/src/faq.md: -------------------------------------------------------------------------------- 1 | # FAQ 2 | 3 | Currently empty, as there are no questions that are asked frequently :D 4 | -------------------------------------------------------------------------------- /docs/src/forge_integration.md: -------------------------------------------------------------------------------- 1 | # Forge Integrations 2 | 3 | In addition to managing repositories locally, `grm` also integrates with source 4 | code hosting platforms. 
Right now, the following platforms are supported: 5 | 6 | * [GitHub](https://github.com/) 7 | * [GitLab](https://gitlab.com/) 8 | 9 | Imagine you are just starting out with `grm` and want to clone all your 10 | repositories from GitHub. This is as simple as: 11 | 12 | ```bash 13 | $ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects 14 | ``` 15 | 16 | You will end up with your projects cloned into 17 | `~/projects/{your_github_username}/` 18 | 19 | ## Authentication 20 | 21 | The only currently supported authentication option is using a personal access 22 | token. 23 | 24 | ### GitHub 25 | 26 | See the GitHub documentation for personal access tokens: 27 | [Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token). 28 | 29 | When using a fine-grained access token, only "Read" access for the "Metadata" permissions are required (cloning will use 30 | unauthorized HTTPS for public repos or SSH for private repos, not the personal access token!) 31 | 32 | ### GitLab 33 | 34 | See the GitLab documentation for personal access tokens: 35 | [Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html). 36 | 37 | The required scopes are a bit weird. Actually, the following should suffice: 38 | 39 | * `read_user` to get user information (required to get the current 40 | authenticated user name for the `--owner` filter. 41 | * A scope that allows reading private repositories. (`read_repository` is just 42 | for *cloning* private repos). This unfortunately does not exist. 43 | 44 | So currently, you'll need to select the `read_api` scope. 45 | 46 | ## Filters 47 | 48 | By default, `grm` will sync **nothing**. This is quite boring, so you have to 49 | tell the command what repositories to include. They are all inclusive (i.e. act 50 | as a logical OR), so you can easily chain many filters to clone a bunch of 51 | repositories. It's quite simple: 52 | 53 | * `--user ` syncs all repositories of that remote user 54 | * `--group ` syncs all repositories of that remote group/organization 55 | * `--owner` syncs all repositories of the user that is used for authentication. 56 | This is effectively a shortcut for `--user $YOUR_USER` 57 | * `--access` syncs all repositories that the current user has access to 58 | 59 | Easiest to see in an example: 60 | 61 | ```bash 62 | $ grm repos sync remote --provider github --user torvals --owner --group zalando [...] 63 | ``` 64 | 65 | This would sync all of Torvald's repositories, all of my own repositories and 66 | all (public) repositories in the "zalando" group. 67 | 68 | ## Strategies 69 | 70 | There are generally three ways how you can use `grm` with forges: 71 | 72 | ### Ad-hoc cloning 73 | 74 | This is the easiest, there are no local files involved. You just run the 75 | command, `grm` clones the repos, that's it. If you run the command again, `grm` 76 | will figure out the differences between local and remote repositories and 77 | resolve them locally. 78 | 79 | ### Create a file 80 | 81 | This is effectively `grm repos find local`, but using the forge instead of the 82 | local file system. You will end up with a normal repository file that you can 83 | commit to git. To update the list of repositories, just run the command again 84 | and commit the new file. 85 | 86 | ### Define options in a file 87 | 88 | This is a hybrid approach: You define filtering options in a file that you can 89 | commit to source control. 
Effectively, you are persisting the options you gave 90 | to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will 91 | figure out differences between local and remote and resolve them. 92 | 93 | A file would look like this: 94 | 95 | ```toml 96 | provider = "github" 97 | token_command = "cat ~/.github_token" 98 | root = "~/projects" 99 | 100 | [filters] 101 | owner = true 102 | groups = [ 103 | "zalando" 104 | ] 105 | ``` 106 | 107 | The options in the file map to the command line options of the `grm repos sync 108 | remote` command. 109 | 110 | You'd then run the `grm repos sync` command the same way as with a list of 111 | repositories in a configuration: 112 | 113 | ```bash 114 | $ grm repos sync --config example.config.toml 115 | ``` 116 | 117 | You can even use that file to generate a repository list that you can feed into 118 | `grm repos sync`: 119 | 120 | ```bash 121 | $ grm repos find config --config example.config.toml > repos.toml 122 | $ grm repos sync config --config repos.toml 123 | ``` 124 | 125 | ## Using with self-hosted GitLab 126 | 127 | By default, `grm` uses the default GitLab API endpoint 128 | ([https://gitlab.com](https://gitlab.com)). You can override the endpoint by 129 | specifying the `--api-url` parameter. Like this: 130 | 131 | ```bash 132 | $ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...] 133 | ``` 134 | 135 | ## The cloning protocol 136 | 137 | By default, `grm` will use HTTPS for public repositories and SSH otherwise. This 138 | can be overridden with the `--force-ssh` switch. 139 | 140 | ## About the token command 141 | 142 | To ensure maximum flexibility, `grm` has a single way to get the token it uses 143 | to authenticate: Specify a command that returns the token via stdout. This 144 | easily integrates with password managers like 145 | [`pass`](https://www.passwordstore.org/). 146 | 147 | Of course, you are also free to specify something like `echo mytoken` as the 148 | command, as long as you are OK with the security implications (like having the 149 | token in clear text in your shell history). It may be better to have the token 150 | in a file instead and read it: `cat ~/.gitlab_token`. 151 | 152 | Generally, use whatever you want. The command just has to return successfully 153 | and return the token as the first line of stdout. 154 | 155 | ## Examples 156 | 157 | Maybe you just want to locally clone all repos from your GitHub user? 158 | 159 | ```bash 160 | $ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token" 161 | ``` 162 | 163 | This will clone all repositories into 164 | `~/github_projects/{your_github_username}`. 165 | 166 | If instead you want to clone **all** repositories you have access to (e.g. via 167 | organizations or other users' private repos you have access to), just change the 168 | filter a little bit: 169 | 170 | ```bash 171 | $ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token" 172 | ``` 173 | 174 | ## Limitations 175 | 176 | ### GitHub 177 | 178 | Unfortunately, GitHub does not have a nice API endpoint to get **private** 179 | repositories for a certain user 180 | ([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) 181 | only returns public repositories). 182 | 183 | Therefore, using `--user {user}` will only show public repositories for GitHub. 
184 | Note that this does not apply to `--access`: If you have access to another 185 | user's private repository, it will be listed. 186 | 187 | ## Adding integrations 188 | 189 | Adding a new integration involves writing some Rust code. Most of the logic is 190 | generic, so you will not have to reinvent the wheel. Generally, you will need to 191 | gather the following information: 192 | 193 | * A list of repositories for a single user 194 | * A list of repositories for a group (or any similar concept if applicable) 195 | * A list of repositories for the user that the API token belongs to 196 | * The username of the currently authenticated user 197 | 198 | Authentication currently only works via a bearer token passed via the 199 | `Authorization` HTTP header. 200 | 201 | Each repo has to have the following properties: 202 | 203 | * A name (which also acts as the identifier for diff between local and remote 204 | repositories) 205 | * An SSH URL to push to 206 | * An HTTPS URL to clone and fetch from 207 | * A flag that marks the repository as private 208 | 209 | If you plan to implement another forge, please first open an issue so we can go 210 | through the required setup. I'm happy to help! 211 | -------------------------------------------------------------------------------- /docs/src/formatting_and_style.md: -------------------------------------------------------------------------------- 1 | # Formatting & Style 2 | 3 | ## Code formatting 4 | 5 | I'm allergic to discussions about formatting. I'd rather make the computer do it 6 | for me. 7 | 8 | For Rust, just use `cargo fmt`. For Python, use 9 | [black](https://github.com/psf/black). I'd rather not spend any effort in 10 | configuring the formatters (not possible for black anyway). For shell scripts, 11 | use [`shfmt`](https://github.com/mvdan/sh). 12 | 13 | To autoformat all code, use `just fmt` 14 | 15 | ## Style 16 | 17 | Honestly, no idea about style. I'm still learning Rust, so I'm trying to find a 18 | good style. Just try to keep it consistent when you add code. 19 | 20 | ## Linting 21 | 22 | You can use `just lint` to run all lints. 23 | 24 | ### Rust 25 | 26 | Clippy is the guard that prevents shitty code from getting into the code base. 27 | When running `just check`, any clippy suggestions will make the command fail. 28 | So make clippy happy! The easiest way: 29 | 30 | * Commit your changes (so clippy can change safely). 31 | * Run `cargo clippy --fix` to do the easy changes automatically. 32 | * Run `cargo clippy` and take a look at the messages. 33 | 34 | Until now, I had no need to override or silence any clippy suggestions. 35 | 36 | ### Shell 37 | 38 | `shellcheck` lints all shell scripts. As they change very rarely, this is not 39 | too important. 40 | 41 | ## Unsafe code 42 | 43 | Any `unsafe` code is forbidden for now globally via `#![forbid(unsafe_code)]`. 44 | I cannot think of any reason GRM may need `unsafe`. If it comes up, it needs to 45 | be discussed. 46 | -------------------------------------------------------------------------------- /docs/src/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | ## Installation 4 | 5 | Building GRM requires the Rust toolchain to be installed. The easiest way is 6 | using [`rustup`](https://rustup.rs/). Make sure that rustup is properly 7 | installed. 
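
If rustup is not set up yet, the upstream installer script is one way to get it. This is the standard command from [rustup.rs](https://rustup.rs/), not something GRM-specific; your distribution may also package rustup directly:

```bash
$ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```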
8 | 9 | Make sure that the stable toolchain is installed: 10 | 11 | ```bash 12 | $ rustup toolchain install stable 13 | ``` 14 | 15 | Then, install the build dependencies: 16 | 17 | | Distribution | Command | 18 | | ------------- | ------------------------------------------------------------------------------ | 19 | | Arch Linux | `pacman -S --needed gcc openssl pkg-config` | 20 | | Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` | 21 | 22 | Then, it's a simple command to install the latest stable version: 23 | 24 | ```bash 25 | $ cargo install git-repo-manager 26 | ``` 27 | 28 | If you're brave, you can also run the development build: 29 | 30 | ```bash 31 | $ cargo install --git https://github.com/hakoerber/git-repo-manager.git --branch develop 32 | ``` 33 | 34 | ## Static build 35 | 36 | Note that by default, you will get a dynamically linked executable. 37 | Alternatively, you can also build a statically linked binary. For this, you will 38 | need `musl` and a few other build dependencies installed installed: 39 | 40 | | Distribution | Command | 41 | | ------------- | --------------------------------------------------------------------------- | 42 | | Arch Linux | `pacman -S --needed gcc musl perl make` | 43 | | Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` | 44 | 45 | (`perl` and `make` are required for the OpenSSL build script) 46 | 47 | The, add the musl target via `rustup`: 48 | 49 | ```bash 50 | $ rustup target add x86_64-unknown-linux-musl 51 | ``` 52 | 53 | Then, use a modified build command to get a statically linked binary: 54 | 55 | ```bash 56 | $ cargo install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build 57 | ``` 58 | 59 | ## [Nix](https://nixos.org/) 60 | 61 | Run from github without downloading: 62 | 63 | ```bash 64 | $ nix run github:hakoerber/git-repo-manager/develop -- --version 65 | git-repo-manager 0.7.15 66 | ``` 67 | 68 | Run from local source directory: 69 | 70 | ```bash 71 | $ nix run . -- --version 72 | git-repo-manager 0.7.15 73 | ``` 74 | 75 | Integrate into a [Nix Flake](https://nixos.wiki/wiki/Flakes): 76 | 77 | ```nix 78 | { 79 | inputs = { 80 | ... 81 | git-repo-manager = { 82 | url = "github:hakoerber/git-repo-manager"; 83 | inputs.nixpkgs.follows = "nixpkgs"; 84 | inputs.flake-utils.follows = "flake-utils"; 85 | }; 86 | }; 87 | 88 | outputs = { 89 | ... 90 | pkgs = import inputs.nixpkgs { 91 | ... 92 | overlays = [ inputs.git-repo-manager.overlays.git-repo-manager ]; 93 | }; 94 | }; 95 | } 96 | ``` 97 | -------------------------------------------------------------------------------- /docs/src/local_configuration.md: -------------------------------------------------------------------------------- 1 | # Local Configuration 2 | 3 | When managing multiple git repositories with GRM, you'll generally have a 4 | configuration file containing information about all the repos you have. GRM then 5 | makes sure that you repositories match that configuration. If they don't exist 6 | yet, it will clone them. It will also make sure that all remotes are configured 7 | properly. 8 | 9 | Let's try it out: 10 | 11 | ## Get the example configuration 12 | 13 | ```bash 14 | curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml 15 | ``` 16 | 17 | Then, you're ready to run the first sync. This will clone all configured 18 | repositories and set up the remotes. 
19 | 20 | ```bash 21 | $ grm repos sync config --config example.config.toml 22 | [⚙] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git" 23 | [✔] git-repo-manager: Repository successfully cloned 24 | [⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git" 25 | [✔] git-repo-manager: OK 26 | [⚙] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git" 27 | [✔] dotfiles: Repository successfully cloned 28 | [✔] dotfiles: OK 29 | ``` 30 | 31 | If you run it again, it will report no changes: 32 | 33 | ```bash 34 | $ grm repos sync config -c example.config.toml 35 | [✔] git-repo-manager: OK 36 | [✔] dotfiles: OK 37 | ``` 38 | 39 | ### Generate your own configuration 40 | 41 | Now, if you already have a few repositories, it would be quite laborious to 42 | write a configuration from scratch. Luckily, GRM has a way to generate a 43 | configuration from an existing file tree: 44 | 45 | ```bash 46 | grm repos find local ~/your/project/root > config.toml 47 | ``` 48 | 49 | This will detect all repositories and remotes and write them to `config.toml`. 50 | 51 | You can exclude repositories from the generated configuration by providing 52 | a regex that will be test against the path of each discovered repository: 53 | 54 | ```bash 55 | grm repos find local ~/your/project/root --exclude "^.*/subdir/match-(foo|bar)/.*$" > config.toml 56 | ``` 57 | 58 | ### Show the state of your projects 59 | 60 | ```bash 61 | $ grm repos status --config example.config.toml 62 | ╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮ 63 | │ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │ 64 | ╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡ 65 | │ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │ 66 | │ ┆ ┆ ┆ ✔ ┆ ┆ origin │ 67 | ├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤ 68 | │ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │ 69 | ╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯ 70 | ``` 71 | 72 | You can also use `status` without `--config` to check the repository you're 73 | currently in: 74 | 75 | ```bash 76 | $ cd ~/example-projects/dotfiles 77 | $ grm repos status 78 | ╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮ 79 | │ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │ 80 | ╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡ 81 | │ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │ 82 | ╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯ 83 | ``` 84 | 85 | ## YAML 86 | 87 | By default, the repo configuration uses TOML. If you prefer YAML, just give it a 88 | YAML file instead (file ending does not matter, `grm` will figure out the 89 | format). For generating a configuration, pass `--format yaml` to `grm repo 90 | find` which generates a YAML configuration instead of a TOML configuration. 91 | -------------------------------------------------------------------------------- /docs/src/overview.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Welcome! This is the documentation for [Git Repo 4 | Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a tool 5 | that helps you manage git repositories in a declarative way. 6 | 7 | GRM helps you manage git repositories in a declarative way. 
Configure your 8 | repositories in a TOML or YAML file, GRM does the rest. Take a look at [the 9 | example 10 | configuration](https://github.com/hakoerber/git-repo-manager/blob/master/example.config.toml) 11 | to get a feel for the way you configure your repositories. See the [repository 12 | tree chapter](./repos.md) for details. 13 | 14 | GRM also provides some tooling to work with single git repositories using 15 | `git-worktree`. See [the worktree chapter](./worktrees.md) for more details. 16 | 17 | ## Why use GRM? 18 | 19 | If you're working with a lot of git repositories, GRM can help you to manage 20 | them in an easy way: 21 | 22 | * You want to easily clone many repositories to a new machine. 23 | * You want to change remotes for multiple repositories (e.g. because your GitLab 24 | domain changed). 25 | * You want to get an overview over all repositories you have, and check whether 26 | you forgot to commit or push something. 27 | 28 | If you want to work with [git worktrees](https://git-scm.com/docs/git-worktree) 29 | in a streamlined, easy way, GRM provides you with an opinionated workflow. It's 30 | especially helpful when the following describes you: 31 | 32 | * You're juggling a lot of git branches, switching between them a lot. 33 | * When switching branches, you'd like to just leave your work as-is, without 34 | using the stash or temporary commits. 35 | -------------------------------------------------------------------------------- /docs/src/releases.md: -------------------------------------------------------------------------------- 1 | # Releases 2 | 3 | To make a release, make sure you are on a clean `develop` branch, sync your 4 | remotes and then run `./release (major|minor|patch)`. It will handle a 5 | git-flow-y release, meaning that it will perform a merge from `develop` to 6 | `master`, create a git tag, sync all remotes and run `cargo publish`. 7 | 8 | Make sure to run `just check` before releasing to make sure that nothing is 9 | broken. 10 | 11 | As GRM is still `v0.x`, there is not much consideration for backwards 12 | compatibility. Generally, update the patch version for small stuff and the minor 13 | version for bigger / backwards incompatible changes. 14 | 15 | Generally, it's good to regularly release a new patch release with [updated 16 | dependencies](./dependency_updates.md). As `./release.sh patch` is exposed as a 17 | Justfile target (`release-patch`), it's possible to do both in one step: 18 | 19 | ```bash 20 | $ just update-dependencies check release-patch 21 | ``` 22 | 23 | ## Release notes 24 | 25 | There are currently no release notes. Things are changing quite quickly and 26 | there is simply no need for a record of changes (except the git history of 27 | course). 28 | -------------------------------------------------------------------------------- /docs/src/repos.md: -------------------------------------------------------------------------------- 1 | # Managing Repositories 2 | 3 | GRM helps you manage a bunch of git repositories easily. There are generally two 4 | ways to go about that: 5 | 6 | You can either manage a list of repositories in a TOML or YAML file, and use GRM 7 | to sync the configuration with the state of the repository. 8 | 9 | Or, you can pull repository information from a forge (e.g. GitHub, GitLab) and 10 | clone the repositories. 11 | 12 | There are also hybrid modes where you pull information from a forge and create a 13 | configuration file that you can use later. 
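
To make the declarative approach concrete, here is a minimal sketch of what such a configuration can look like in TOML. The root, repository name and remote URL below are placeholders; the exact schema is documented in the following chapters and in `example.config.toml` in the repository:

```toml
# A tree groups repositories under a common root directory.
[[trees]]
root = "~/projects"

# One managed repository inside that tree.
[[trees.repos]]
name = "git-repo-manager"
worktree_setup = false

# Remotes that GRM keeps configured for this repository.
[[trees.repos.remotes]]
name = "origin"
url = "https://github.com/hakoerber/git-repo-manager.git"
type = "https"
```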
14 | -------------------------------------------------------------------------------- /docs/src/testing.md: -------------------------------------------------------------------------------- 1 | # Testing 2 | 3 | There are two distinct test suites: One for unit test (`just test-unit`) and 4 | integration tests (`just test-integration`) that is part of the rust crate, and 5 | a separate e2e test suite in python (`just test-e2e`). 6 | 7 | To run all tests, run `just test`. 8 | 9 | When contributing, consider whether it makes sense to add tests which could 10 | prevent regressions in the future. When fixing bugs, it makes sense to add tests 11 | that expose the wrong behavior beforehand. 12 | 13 | The unit and integration tests are very small and only test a few self-contained 14 | functions (like validation of certain input). 15 | 16 | ## E2E tests 17 | 18 | The main focus of the testing setup lays on the e2e tests. Each user-facing 19 | behavior *should* have a corresponding e2e test. These are the most important 20 | tests, as they test functionality the user will use in the end. 21 | 22 | The test suite is written in python and uses 23 | [pytest](https://docs.pytest.org/en/stable/). There are helper functions that 24 | set up temporary git repositories and remotes in a `tmpfs`. 25 | 26 | Effectively, each tests works like this: 27 | 28 | * Set up some prerequisites (e.g. different git repositories or configuration 29 | files) 30 | * Run `grm` 31 | * Check that everything is according to expected behavior (e.g. that `grm` had 32 | certain output and exit code, that the target repositories have certain 33 | branches, heads and remotes, ...) 34 | 35 | As there are many different scenarios, the tests make heavy use of the 36 | [`@pytest.mark.parametrize`](https://docs.pytest.org/en/stable/how-to/parametrize.html#pytest-mark-parametrize) 37 | decorator to get all permutations of input parameters (e.g. whether a 38 | configuration exists, what a config value is set to, how the repository looks 39 | like, ...) 40 | 41 | Whenever you write a new test, think about the different circumstances that can 42 | happen. What are the failure modes? What affects the behavior? Parametrize each 43 | of these behaviors. 44 | 45 | ### Optimization 46 | 47 | Note: You will most likely not need to read this. 48 | 49 | Each test parameter will exponentially increase the number of tests that will be 50 | run. As a general rule, comprehensiveness is more important than test suite 51 | runtime (so if in doubt, better to add another parameter to catch every edge 52 | case). But try to keep the total runtime sane. Currently, the whole `just test-e2e` 53 | target runs ~8'000 tests and takes around 5 minutes on my machine, exlucding 54 | binary and docker build time. I'd say that keeping it under 10 minutes is a good 55 | idea. 56 | 57 | To optimize tests, look out for two patterns: Dependency and Orthogonality 58 | 59 | #### Dependency 60 | 61 | If a parameter depends on another one, it makes little sense to handle them 62 | independently. Example: You have a paramter that specifies whether a 63 | configuration is used, and another parameter that sets a certain value in that 64 | configuration file. It might look something like this: 65 | 66 | ```python 67 | @pytest.mark.parametrize("use_config", [True, False]) 68 | @pytest.mark.parametrize("use_value", ["0", "1"]) 69 | def test(...): 70 | ``` 71 | 72 | This leads to 4 tests being instantiated. 
But there is little point in setting a 73 | configuration value when no config is used, so the combinations `(False, "0")` 74 | and `(False, "1")` are redundant. To remedy this, spell out the optimized 75 | permutation manually: 76 | 77 | ```python 78 | @pytest.mark.parametrize("config", ((True, "0"), (True, "1"), (False, None))) 79 | def test(...): 80 | (use_config, use_value) = config 81 | ``` 82 | 83 | This cuts down the number of tests by 25%. If you have more dependent parameters 84 | (e.g. additional configuration values), this gets even better. Generally, this 85 | will cut down the number of tests to 86 | 87 | \\[ \frac{1}{o \cdot c} + \frac{1}{(o \cdot c) ^ {(n + 1)}} \\] 88 | 89 | with \\( o \\) being the number of values of a parent parameters a parameter is 90 | dependent on, \\( c \\) being the cardinality of the test input (so you can 91 | assume \\( o = 1 \\) and \\( c = 2 \\) for boolean parameters), and \\( n \\) 92 | being the number of parameters that are optimized, i.e. folded into their 93 | dependent parameter. 94 | 95 | As an example: Folding down two boolean parameters into one dependent parent 96 | boolean parameter will cut down the number of tests to 62.5%! 97 | 98 | #### Orthogonality 99 | 100 | If different test parameters are independent of each other, there is little 101 | point in testing their combinations. Instead, split them up into different test 102 | functions. For boolean parameters, this will cut the number of tests in half. 103 | 104 | So instead of this: 105 | 106 | ```python 107 | @pytest.mark.parametrize("param1", [True, False]) 108 | @pytest.mark.parametrize("param2", [True, False]) 109 | def test(...): 110 | ``` 111 | 112 | Rather do this: 113 | 114 | ```python 115 | @pytest.mark.parametrize("param1", [True, False]) 116 | def test_param1(...): 117 | 118 | @pytest.mark.parametrize("param2", [True, False]) 119 | def test_param2(...): 120 | ``` 121 | 122 | The tests are running in Docker via docker-compose. This is mainly needed to 123 | test networking functionality like GitLab integration, with the GitLab API being 124 | mocked by a simple flask container. 125 | -------------------------------------------------------------------------------- /docs/src/tutorial.md: -------------------------------------------------------------------------------- 1 | # Tutorial 2 | 3 | Here, you'll find a quick overview over the most common functionality of GRM. 4 | 5 | ## Managing existing repositories 6 | 7 | Let's say you have your git repositories at `~/code`. To start managing them via 8 | GRM, first create a configuration: 9 | 10 | ```bash 11 | grm repos find local ~/code --format yaml > ~/code/config.yml 12 | ``` 13 | 14 | The result may look something like this: 15 | 16 | ```yaml 17 | --- 18 | trees: 19 | - root: ~/code 20 | repos: 21 | - name: git-repo-manager 22 | worktree_setup: true 23 | remotes: 24 | - name: origin 25 | url: "https://github.com/hakoerber/git-repo-manager.git" 26 | type: https 27 | ``` 28 | 29 | To apply the configuration and check whether all repositories are in sync, run 30 | the following: 31 | 32 | ```bash 33 | $ grm repos sync config --config ~/code/config.yml 34 | [✔] git-repo-manager: OK 35 | ``` 36 | 37 | Well, obiously there are no changes. 
To check how changes would be applied, 38 | let's change the name of the remote (currently `origin`): 39 | 40 | ```bash 41 | $ sed -i 's/name: origin/name: github/' ~/code/config.yml 42 | $ grm repos sync config --config ~/code/config.yml 43 | [⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git" 44 | [⚙] git-repo-manager: Deleting remote "origin" 45 | [✔] git-repo-manager: OK 46 | ``` 47 | 48 | GRM replaced the `origin` remote with `github`. 49 | 50 | The configuration (`~/code/config.yml` in this example) would usually be 51 | something you'd track in git or synchronize between machines via some other 52 | means. Then, on every machine, all your repositories are a single `grm repos 53 | sync` away! 54 | 55 | ## Getting repositories from a forge 56 | 57 | Let's say you have a bunch of repositories on GitHub and you'd like to clone 58 | them all to your local machine. 59 | 60 | To authenticate, you'll need to get a personal access token, as described in 61 | [the forge documentation](./forge_integration.md#github). Let's assume you put 62 | your token into `~/.github_token` (please don't if you're doing this "for 63 | real"!) 64 | 65 | Let's first see what kind of repos we can find: 66 | 67 | ```bash 68 | $ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml 69 | --- 70 | trees: [] 71 | $ 72 | ``` 73 | 74 | Ummm, ok? No repos? This is because you have to *tell* GRM what to look for (if 75 | you don't, GRM will just relax, as it's lazy). 76 | 77 | There are different filters (see [the forge 78 | documentation](./forge_integration.md#filters) for more info). In our case, 79 | we'll just use the `--owner` filter to get all repos that belong to us: 80 | 81 | ```bash 82 | $ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml 83 | --- 84 | trees: 85 | - root: ~/code/github.com 86 | repos: 87 | - name: git-repo-manager 88 | worktree_setup: false 89 | remotes: 90 | - name: origin 91 | url: "https://github.com/hakoerber/git-repo-manager.git" 92 | type: https 93 | ``` 94 | 95 | Nice! The format is the same as we got from `grm repos find local` above. So if 96 | we wanted, we could save this file and use it with `grm repos sync config` as 97 | above. But there is an even easier way: We can directly clone the repositories! 98 | 99 | ```bash 100 | $ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ 101 | [⚙] Cloning into "~/code/github.com/git-repo-manager" from "https://github.com/hakoerber/git-repo-manager.git" 102 | [✔] git-repo-manager: Repository successfully cloned 103 | [✔] git-repo-manager: OK 104 | ``` 105 | 106 | Nice! Just to make sure, let's run the same command again: 107 | 108 | ```bash 109 | $ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ 110 | [✔] git-repo-manager: OK 111 | ``` 112 | 113 | GRM saw that the repository is already there and did nothing (remember, it's 114 | lazy). 115 | 116 | ## Using worktrees 117 | 118 | Worktrees are something that make it easier to work with multiple branches at 119 | the same time in a repository. Let's say we wanted to hack on the codebase of 120 | GRM: 121 | 122 | ```bash 123 | $ cd ~/code/github.com/git-repo-manager 124 | $ ls 125 | .gitignore 126 | Cargo.toml 127 | ... 128 | ``` 129 | 130 | Well, this is just a normal git repository. 
But let's try worktrees! First, we 131 | have to convert the existing repository to use the special worktree setup. For 132 | all worktree operations, we will use `grm worktree` (or `grm wt` for short): 133 | 134 | ```bash 135 | $ grm wt convert 136 | [✔] Conversion done 137 | $ ls 138 | $ 139 | ``` 140 | 141 | So, the code is gone? Not really, there is just no active worktree right now. So 142 | let's add one for `master`: 143 | 144 | 145 | ```bash 146 | $ grm wt add master --track origin/master 147 | [✔] Conversion done 148 | $ ls 149 | master 150 | $ (cd ./master && git status) 151 | On branch master 152 | nothing to commit, working tree clean 153 | ``` 154 | 155 | Now, a single worktree is kind of pointless (if we only have one, we could also 156 | just use the normal setup, without worktrees). So let's another one for 157 | `develop`: 158 | 159 | ```bash 160 | $ grm wt add develop --track origin/develop 161 | [✔] Conversion done 162 | $ ls 163 | develop 164 | master 165 | $ (cd ./develop && git status) 166 | On branch develop 167 | nothing to commit, working tree clean 168 | ``` 169 | 170 | What's the point? The cool thing is that we can now start working in the 171 | `develop` worktree, without affecting the `master` worktree at all. If you're 172 | working on `develop` and want to quickly see what a certain file looks like in 173 | `master`, just look inside `./master`, it's all there! 174 | 175 | This becomes especially interesting when you have many feature branches and are 176 | working on multiple features at the same time. 177 | 178 | There are a lot of options that influence how worktrees are handled. Maybe you 179 | want to automatically track `origin/master` when you add a worktree called 180 | `master`? Maybe you want your feature branches to have a prefix, so when you're 181 | working on the `feature1` worktree, the remote branch will be 182 | `origin/awesomefeatures/feature1`? Check out [the chapter on 183 | worktrees](./worktrees.md) for all the things that are possible. 184 | -------------------------------------------------------------------------------- /docs/src/worktree_behavior.md: -------------------------------------------------------------------------------- 1 | # Behavior Details 2 | 3 | When working with worktrees and GRM, there is a lot going on under the hood. 4 | Each time you create a new worktree, GRM has to figure out what commit to set 5 | your new branch to and how to configure any potential remote branches. 6 | 7 | To state again, the most important guideline is the following: 8 | 9 | **The branch inside the worktree is always the same as the directory name of the 10 | worktree.** 11 | 12 | The second set of guidelines relates to the commit to check out, and the remote 13 | branches to use: 14 | 15 | * When a branch already exists, you will get a worktree for that branch 16 | * Existing local branches are never changed 17 | * Only do remote operations if specifically requested (via configuration file or 18 | command line parameters) 19 | * When you specify `--track`, you will get that exact branch as the tracking 20 | branch 21 | * When you specify `--no-track`, you will get no tracking branch 22 | 23 | Apart from that, GRM tries to do The Right ThingTM. It should be as 24 | little surprising as possible. 25 | 26 | In 99% of the cases, you will not have to care about the details, as the normal 27 | workflows are covered by the rules above. 
In case you want to know the exact 28 | behavior "specification", take a look at the [module documentation for 29 | `grm::worktree`](https://docs.rs/git-repo-manager/latest/grm/worktree/index.html). 30 | 31 | If you think existing behavior is super-duper confusing and you have a better 32 | idea, do not hesitate to open a GitHub issue to discuss this! 33 | -------------------------------------------------------------------------------- /docs/src/worktree_remotes.md: -------------------------------------------------------------------------------- 1 | # Worktrees and Remotes 2 | 3 | To fetch all remote references from all remotes in a worktree setup, you can use 4 | the following command: 5 | 6 | ``` 7 | $ grm wt fetch 8 | [✔] Fetched from all remotes 9 | ``` 10 | 11 | This is equivalent to running `git fetch --all` in any of the worktrees. 12 | 13 | Often, you may want to pull all remote changes into your worktrees. For this, 14 | use the `git pull` equivalent: 15 | 16 | ``` 17 | $ grm wt pull 18 | [✔] master: Done 19 | [✔] my-cool-branch: Done 20 | ``` 21 | 22 | This will refuse when there are local changes, or if the branch cannot be fast 23 | forwarded. If you want to rebase your local branches, use the `--rebase` switch: 24 | 25 | ``` 26 | $ grm wt pull --rebase 27 | [✔] master: Done 28 | [✔] my-cool-branch: Done 29 | ``` 30 | 31 | As noted, this will fail if there are any local changes in your worktree. If you 32 | want to stash these changes automatically before the pull (and unstash them 33 | afterwards), use the `--stash` option. 34 | 35 | This will rebase your changes onto the upstream branch. This is mainly helpful 36 | for persistent branches that change on the remote side. 37 | 38 | There is a similar rebase feature that rebases onto the **default** branch 39 | instead: 40 | 41 | ``` 42 | $ grm wt rebase 43 | [✔] master: Done 44 | [✔] my-cool-branch: Done 45 | ``` 46 | 47 | This is super helpful for feature branches. If you want to incorporate changes 48 | made on the remote branches, use `grm wt rebase` and all your branches will be 49 | up to date. If you want to also update to remote tracking branches in one go, 50 | use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting 51 | on non-fast-forwards: 52 | 53 | ``` 54 | $ grm wt rebase --pull --rebase 55 | [✔] master: Done 56 | [✔] my-cool-branch: Done 57 | ``` 58 | 59 | "So, what's the difference between `pull --rebase` and `rebase --pull`? Why the 60 | hell is there a `--rebase` flag in the `rebase` command?" 61 | 62 | Yes, it's kind of weird. Remember that `pull` only ever updates each worktree to 63 | their remote branch, if possible. `rebase` rebases onto the **default** branch 64 | instead. The switches to `rebase` are just convenience, so you do not have to 65 | run two commands. 66 | 67 | * `rebase --pull` is the same as `pull` && `rebase` 68 | * `rebase --pull --rebase` is the same as `pull --rebase` && `rebase` 69 | 70 | I understand that the UX is not the most intuitive. If you can think of an 71 | improvement, please let me know (e.g. via an GitHub issue)! 72 | 73 | As with `pull`, `rebase` will also refuse to run when there are changes in your 74 | worktree. And you can also use the `--stash` option to stash/unstash changes 75 | automatically. 
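
To make the equivalences above concrete, here is a sketched session (the output lines are illustrative, not verbatim tool output): running `pull --rebase` followed by `rebase` leaves the worktrees in the same state as the combined form:

```
$ grm wt pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
$ grm wt rebase
[✔] master: Done
[✔] my-cool-branch: Done

$ grm wt rebase --pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```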
76 |
-------------------------------------------------------------------------------- /docs/src/worktree_working.md: --------------------------------------------------------------------------------
1 | # Working with Worktrees
2 |
3 | ## Creating a new worktree
4 |
5 | To actually work, you'll first have to create a new worktree checkout. All
6 | worktree-related commands are available as subcommands of `grm worktree` (or
7 | `grm wt` for short):
8 |
9 | ```
10 | $ grm wt add mybranch
11 | [✔] Worktree mybranch created
12 | ```
13 |
14 | You'll see that there is now a directory called `mybranch` that contains a
15 | checkout of your repository, using the branch `mybranch`:
16 |
17 | ```bash
18 | $ cd ./mybranch && git status
19 | On branch mybranch
20 | nothing to commit, working tree clean
21 | ```
22 |
23 | You can work in this repository as usual. Make changes, commit them, revert
24 | them, whatever you're up to :)
25 |
26 | Just note that you *should* not change the branch inside the worktree directory.
27 | There is nothing preventing you from doing so, but you will notice that you'll
28 | run into problems when trying to remove a worktree (more on that later). It may
29 | also lead to confusing behavior, as no two worktrees can have the same branch
30 | checked out. So if you decide to use the worktree setup, go all in,
31 | let `grm` manage your branches and bury `git branch` (and `git checkout -b`).
32 |
33 | You will notice that there is no tracking branch set up for the new branch. You
34 | can of course set one up manually after creating the worktree, but there is an
35 | easier way, using the `--track` flag during creation. Let's create another
36 | worktree. Go back to the root of the repository, and run:
37 |
38 | ```bash
39 | $ grm wt add mybranch2 --track origin/mybranch2
40 | [✔] Worktree mybranch2 created
41 | ```
42 |
43 | You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
44 |
45 | ```bash
46 | $ cd ./mybranch2 && git status
47 | On branch mybranch2
48 | Your branch is up to date with 'origin/mybranch2'.
49 |
50 | nothing to commit, working tree clean
51 | ```
52 |
53 | The behavior of `--track` differs depending on the existence of the remote
54 | branch:
55 |
56 | * If the remote branch already exists, `grm` uses it as the base of the new
57 |   local branch.
58 | * If the remote branch does not exist (as in our example), `grm` will create a
59 |   new remote tracking branch, using the default branch (either `main` or
60 |   `master`) as the base.
61 |
62 | Often, you'll have a workflow that uses tracking branches by default. It would
63 | be quite tedious to add `--track` every single time. Luckily, the `grm.toml`
64 | file supports defaults for the tracking behavior. See this for an example:
65 |
66 | ```toml
67 | [track]
68 | default = true
69 | default_remote = "origin"
70 | ```
71 |
72 | This will set up a tracking branch on `origin` that has the same name as the
73 | local branch.
74 |
75 | Sometimes, you might want to have a certain prefix for all your tracking
76 | branches. Maybe to prevent collisions with other contributors. You can simply
77 | set `default_remote_prefix` in `grm.toml`:
78 |
79 | ```toml
80 | [track]
81 | default = true
82 | default_remote = "origin"
83 | default_remote_prefix = "myname"
84 | ```
85 |
86 | When using branch `my-feature-branch`, the remote tracking branch would be
87 | `origin/myname/my-feature-branch` in this case.
88 |
89 | Note that `--track` overrides any configuration in `grm.toml`.
If you want to
90 | disable tracking, use `--no-track`.
91 |
92 | ## Showing the status of your worktrees
93 |
94 | There is a handy little command that will show you an overview of all
95 | worktrees in a repository, including their status (i.e. changed files). Just run
96 | the following in the root of your repository:
97 |
98 | ```
99 | $ grm wt status
100 | ╭───────────┬────────┬───────────┬──────────────────╮
101 | │ Worktree  ┆ Status ┆ Branch    ┆ Remote branch    │
102 | ╞═══════════╪════════╪═══════════╪══════════════════╡
103 | │ mybranch  ┆ ✔      ┆ mybranch  ┆                  │
104 | │ mybranch2 ┆ ✔      ┆ mybranch2 ┆ origin/mybranch2 │
105 | ╰───────────┴────────┴───────────┴──────────────────╯
106 | ```
107 |
108 | The "Status" column would show any uncommitted changes (new / modified / deleted
109 | files) and the "Remote branch" column would show differences from the remote branch (e.g.
110 | if there are new pushes to the remote branch that are not yet incorporated into
111 | your local branch).
112 |
113 |
114 | ## Deleting worktrees
115 |
116 | If you're done with your worktrees, use `grm wt delete` to delete them. Let's
117 | start with `mybranch2`:
118 |
119 | ```
120 | $ grm wt delete mybranch2
121 | [✔] Worktree mybranch2 deleted
122 | ```
123 |
124 | Easy. On to `mybranch`:
125 |
126 | ```
127 | $ grm wt delete mybranch
128 | [!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
129 | ```
130 |
131 | Hmmm. `grm` tells you:
132 |
133 | "Hey, there is no remote branch that you could have pushed your changes to. I'd
134 | rather not delete work that you cannot recover."
135 |
136 | Note that `grm` is very cautious here. As your repository will not be deleted,
137 | you could still recover the commits via
138 | [`git-reflog`](https://git-scm.com/docs/git-reflog). But better safe than
139 | sorry! Note that you'd get a similar error message if your worktree had any
140 | uncommitted files, for the same reason. Now you can either commit & push your
141 | changes, or you tell `grm` that you know what you're doing:
142 |
143 | ```
144 | $ grm wt delete mybranch --force
145 | [✔] Worktree mybranch deleted
146 | ```
147 |
148 | If you just want to delete all worktrees that do not contain any changes, you
149 | can also use the following:
150 |
151 | ```
152 | $ grm wt clean
153 | ```
154 |
155 | Note that this will not delete the default branch of the repository. It can of
156 | course still be deleted with `grm wt delete` if necessary.
157 |
158 | ## Converting an existing repository
159 |
160 | It is possible to convert an existing directory to a worktree setup, using `grm
161 | wt convert`. This command has to be run in the root of the repository you want
162 | to convert:
163 |
164 | ```
165 | $ grm wt convert
166 | [✔] Conversion successful
167 | ```
168 |
169 | This command will refuse to run if you have any changes in your repository.
170 | Commit them and try again!
171 |
172 | Afterwards, the directory is empty, as there are no worktrees checked out yet.
173 | Now you can use the usual commands to set up worktrees.
174 |
-------------------------------------------------------------------------------- /docs/src/worktrees.md: --------------------------------------------------------------------------------
1 | # Git Worktrees
2 |
3 | ## Why?
4 | The default workflow when using git is having your repository in a single directory.
5 | Then, you can check out a certain reference (usually a branch),
6 | which will update the files in the directory to match the state of that
7 | reference. Most of the time, this is exactly what you need and works perfectly.
8 | But especially when you're working with branches a lot, you may notice that
9 | there is a lot of work required to make everything run smoothly.
10 |
11 | Maybe you have experienced the following: You're working on a feature branch.
12 | Then, for some reason, you have to change branches (maybe to investigate some
13 | issue). But you get the following:
14 |
15 | ```
16 | error: Your local changes to the following files would be overwritten by checkout
17 | ```
18 |
19 | Now you can create a temporary commit or stash your changes. In any case, you
20 | have some mental overhead before you can work on something else. Especially with
21 | stashes, you'll have to remember to do a `git stash pop` before resuming your
22 | work (I cannot count the number of times when I "rediscovered" some code hidden
23 | in some old stash I forgot about). Also, conflicts on a `git stash pop` are just
24 | horrible.
25 |
26 | And even worse: If you're currently in the process of resolving merge conflicts
27 | or an interactive rebase, there is just no way to "pause" this work to check out
28 | a different branch.
29 |
30 | Sometimes, it's crucial to have an unchanging state of your repository until
31 | some long-running process finishes. I'm thinking of Ansible and Terraform runs.
32 | I'd rather not change to a different branch while Ansible or Terraform are
33 | running, as I have no idea how those tools would behave (and I'm not too eager to
34 | find out).
35 |
36 | In any case, Git Worktrees are here to the rescue:
37 |
38 | ## What are git worktrees?
39 |
40 | [Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have
41 | multiple independent checkouts of your repository in different directories. You
42 | can have multiple directories that correspond to different references in your
43 | repository. Each worktree has its independent working tree (duh) and index, so
44 | there is no way to run into conflicts. Changing to a different branch is just a
45 | `cd` away (if the worktree is already set up).
46 |
47 | ## Worktrees in GRM
48 |
49 | GRM exposes an opinionated way to use worktrees in your repositories.
50 | Opinionated, because there is a single invariant that makes reasoning about your
51 | worktree setup quite easy:
52 |
53 | **The branch inside the worktree is always the same as the directory name of the
54 | worktree.**
55 |
56 | In other words: If you're checking out branch `mybranch` into a new worktree,
57 | the worktree directory will be named `mybranch`.
58 |
59 | GRM can be used with both "normal" and worktree-enabled repositories. But note
60 | that a single repository can only be one or the other. You'll have to
61 | decide during the initial setup which way you want to go for that repository.
62 |
63 | If you want to clone your repository in a worktree-enabled way, specify
64 | `worktree_setup = true` for the repository in your `config.toml`:
65 |
66 | ```toml
67 | [[trees.repos]]
68 | name = "git-repo-manager"
69 | worktree_setup = true
70 | ```
71 |
72 | Now, when you run a `grm sync`, you'll notice that the directory of the
73 | repository is empty! Well, not totally, there is a hidden directory called
74 | `.git-main-working-tree`. This is where the repository actually "lives" (it's a
75 | bare checkout).
76 |
77 | Note that there are a few specific things you can configure for a certain
78 | workspace. This is all done in an optional `grm.toml` file right in the root of
79 | the worktree. More on that later.
80 |
81 |
82 | ## Manual access
83 |
84 | GRM isn't doing any magic, it's just git under the hood. If you need to have
85 | access to the underlying git repository, you can always do this:
86 |
87 | ```
88 | $ git --git-dir ./.git-main-working-tree [...]
89 | ```
90 |
91 | This should never be required (whenever you have to do this, you can consider
92 | this a bug in GRM and open an
93 | [issue](https://github.com/hakoerber/git-repo-manager/issues/new)), but it may
94 | help in a pinch.
95 |
-------------------------------------------------------------------------------- /e2e_tests/.gitignore: --------------------------------------------------------------------------------
1 | /__pycache__/
2 |
-------------------------------------------------------------------------------- /e2e_tests/conftest.py: --------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | def pytest_configure(config):
5 |     os.environ["GIT_AUTHOR_NAME"] = "Example user"
6 |     os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com"
7 |     os.environ["GIT_COMMITTER_NAME"] = "Example user"
8 |     os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com"
9 |
10 |
11 | def pytest_unconfigure(config):
12 |     pass
13 |
-------------------------------------------------------------------------------- /e2e_tests/docker-compose.yml: --------------------------------------------------------------------------------
1 | services:
2 |   pytest:
3 |     build: ./docker
4 |     volumes:
5 |       - type: bind
6 |         source: ./
7 |         target: /tests
8 |         read_only: true
9 |       - type: tmpfs
10 |         target: /tmp
11 |     environment:
12 |       TMPDIR: /tmp
13 |     depends_on:
14 |       - rest
15 |     command:
16 |       - "true"
17 |     networks:
18 |       main:
19 |
20 |   rest:
21 |     build: ./docker-rest/
22 |     expose:
23 |       - "5000"
24 |     networks:
25 |       main:
26 |         aliases:
27 |           - alternate-rest
28 |
29 | networks:
30 |   main:
31 |
-------------------------------------------------------------------------------- /e2e_tests/docker-rest/Dockerfile: --------------------------------------------------------------------------------
1 | FROM docker.io/debian:11.3
2 |
3 | WORKDIR /app
4 |
5 | ENV FLASK_APP=app.py
6 |
7 | RUN apt-get update \
8 |     && apt-get install -y \
9 |         dumb-init \
10 |         python3-flask \
11 |         python3-jinja2 \
12 |     && apt-get clean \
13 |     && rm -rf /var/lib/apt/lists/*
14 |
15 | EXPOSE 5000
16 |
17 | COPY flask .
18 | 19 | CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"] 20 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | 3 | app = Flask(__name__) 4 | app.url_map.strict_slashes = False 5 | 6 | import github # noqa: E402,F401 7 | import gitlab # noqa: E402,F401 8 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/github.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | 3 | import jinja2 4 | from app import app 5 | from flask import abort, jsonify, make_response, request 6 | 7 | 8 | def check_headers(): 9 | if request.headers.get("accept") != "application/vnd.github.v3+json": 10 | app.logger.error("Invalid accept header") 11 | abort(500) 12 | auth_header = request.headers.get("authorization") 13 | if auth_header != "token secret-token:myauthtoken": 14 | app.logger.error("Invalid authorization header: %s", auth_header) 15 | abort( 16 | make_response( 17 | jsonify( 18 | { 19 | "message": "Bad credentials", 20 | "documentation_url": "https://docs.example.com/rest", 21 | } 22 | ), 23 | 401, 24 | ) 25 | ) 26 | 27 | 28 | def add_pagination(response, page, last_page): 29 | host = request.headers["host"] 30 | link_header = "" 31 | 32 | def args(page): 33 | args = request.args.copy() 34 | args["page"] = page 35 | return "&".join([f"{k}={v}" for k, v in args.items()]) 36 | 37 | if page < last_page: 38 | link_header += ( 39 | f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", ' 40 | ) 41 | link_header += ( 42 | f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"' 43 | ) 44 | response.headers["link"] = link_header 45 | 46 | 47 | def read_project_files(namespaces=[]): 48 | last_page = 4 49 | page = int(request.args.get("page", "1")) 50 | response_file = f"./github_api_page_{page}.json.j2" 51 | if not os.path.exists(response_file): 52 | return jsonify([]) 53 | 54 | response = make_response( 55 | jinja2.Template(open(response_file).read()).render( 56 | namespace=namespaces[page - 1] 57 | ) 58 | ) 59 | add_pagination(response, page, last_page) 60 | response.headers["content-type"] = "application/json" 61 | return response 62 | 63 | 64 | def single_namespaced_projects(namespace): 65 | return read_project_files([namespace] * 4) 66 | 67 | 68 | def mixed_projects(namespaces): 69 | return read_project_files(namespaces) 70 | 71 | 72 | @app.route("/github/users//repos/") 73 | def github_user_repos(user): 74 | check_headers() 75 | if user == "myuser1": 76 | return single_namespaced_projects("myuser1") 77 | return jsonify([]) 78 | 79 | 80 | @app.route("/github/orgs//repos/") 81 | def github_group_repos(group): 82 | check_headers() 83 | if not (request.args.get("type") == "all"): 84 | abort(500, "wrong arguments") 85 | if group == "mygroup1": 86 | return single_namespaced_projects("mygroup1") 87 | return jsonify([]) 88 | 89 | 90 | @app.route("/github/user/repos/") 91 | def github_own_repos(): 92 | check_headers() 93 | return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"]) 94 | 95 | 96 | @app.route("/github/user/") 97 | def github_user(): 98 | check_headers() 99 | response = make_response(open("./github_api_user.json").read()) 100 | response.headers["content-type"] = "application/json" 101 | return response 102 | 
-------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/github_api_page_2.json.j2: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 3, 4 | "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", 5 | "name": "myproject3", 6 | "full_name": "{{ namespace }}/myproject3", 7 | "private": false, 8 | "owner": { 9 | "login": "someuser", 10 | "id": 1, 11 | "node_id": "MDQ6VXNlcjM3NDg2OTY=", 12 | "avatar_url": "https://example.com/u/3748696?v=4", 13 | "gravatar_id": "", 14 | "url": "https://api.example.com/users/{{ namespace }}", 15 | "html_url": "https://example.com/{{ namespace }}", 16 | "followers_url": "https://api.example.com/users/{{ namespace }}/followers", 17 | "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", 18 | "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", 19 | "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", 20 | "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", 21 | "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", 22 | "repos_url": "https://api.example.com/users/{{ namespace }}/repos", 23 | "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", 24 | "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", 25 | "type": "User", 26 | "site_admin": false 27 | }, 28 | "html_url": "https://example.com/{{ namespace }}/myproject3", 29 | "description": "Shell script for automatically building ACI containers from scratch using acbuild.", 30 | "fork": false, 31 | "url": "https://api.example.com/repos/{{ namespace }}/myproject3", 32 | "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/forks", 33 | "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}", 34 | "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}", 35 | "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams", 36 | "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks", 37 | "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}", 38 | "events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events", 39 | "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}", 40 | "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}", 41 | "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags", 42 | "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}", 43 | "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}", 44 | "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}", 45 | "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}", 46 | "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}", 47 | "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages", 48 | "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers", 49 | "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors", 50 | "subscribers_url": "https://api.example.com/repos/{{ namespace 
}}/myproject3/subscribers", 51 | "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription", 52 | "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}", 53 | "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}", 54 | "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}", 55 | "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}", 56 | "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}", 57 | "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}", 58 | "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges", 59 | "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}", 60 | "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads", 61 | "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}", 62 | "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}", 63 | "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}", 64 | "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}", 65 | "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}", 66 | "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}", 67 | "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments", 68 | "created_at": "2016-01-07T22:27:54Z", 69 | "updated_at": "2021-11-20T16:15:37Z", 70 | "pushed_at": "2021-11-20T16:15:34Z", 71 | "git_url": "git://example.com/{{ namespace }}/myproject3.git", 72 | "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git", 73 | "clone_url": "https://example.com/{{ namespace }}/myproject3.git", 74 | "svn_url": "https://example.com/{{ namespace }}/myproject3", 75 | "homepage": null, 76 | "size": 12, 77 | "stargazers_count": 0, 78 | "watchers_count": 0, 79 | "language": "Shell", 80 | "has_issues": true, 81 | "has_projects": true, 82 | "has_downloads": true, 83 | "has_wiki": true, 84 | "has_pages": false, 85 | "forks_count": 0, 86 | "mirror_url": null, 87 | "archived": false, 88 | "disabled": false, 89 | "open_issues_count": 0, 90 | "license": { 91 | "key": "apache-2.0", 92 | "name": "Apache License 2.0", 93 | "spdx_id": "Apache-2.0", 94 | "url": "https://api.example.com/licenses/apache-2.0", 95 | "node_id": "MDc6TGljZW5zZTI=" 96 | }, 97 | "allow_forking": true, 98 | "is_template": false, 99 | "topics": [ 100 | 101 | ], 102 | "visibility": "public", 103 | "forks": 0, 104 | "open_issues": 0, 105 | "watchers": 0, 106 | "default_branch": "master", 107 | "permissions": { 108 | "admin": true, 109 | "maintain": true, 110 | "push": true, 111 | "triage": true, 112 | "pull": true 113 | } 114 | } 115 | ] 116 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/github_api_page_3.json.j2: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 3, 4 | "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", 5 | "name": "myproject4", 6 | "full_name": "{{ namespace }}/myproject4", 7 | "private": false, 8 | "owner": { 9 | "login": "someuser", 10 | "id": 1, 11 | "node_id": 
"MDQ6VXNlcjM3NDg2OTY=", 12 | "avatar_url": "https://example.com/u/3748696?v=4", 13 | "gravatar_id": "", 14 | "url": "https://api.example.com/users/{{ namespace }}", 15 | "html_url": "https://example.com/{{ namespace }}", 16 | "followers_url": "https://api.example.com/users/{{ namespace }}/followers", 17 | "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", 18 | "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", 19 | "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", 20 | "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", 21 | "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", 22 | "repos_url": "https://api.example.com/users/{{ namespace }}/repos", 23 | "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", 24 | "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", 25 | "type": "User", 26 | "site_admin": false 27 | }, 28 | "html_url": "https://example.com/{{ namespace }}/myproject4", 29 | "description": "Shell script for automatically building ACI containers from scratch using acbuild.", 30 | "fork": false, 31 | "url": "https://api.example.com/repos/{{ namespace }}/myproject4", 32 | "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks", 33 | "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}", 34 | "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}", 35 | "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams", 36 | "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks", 37 | "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}", 38 | "events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events", 39 | "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}", 40 | "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}", 41 | "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags", 42 | "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}", 43 | "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}", 44 | "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}", 45 | "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}", 46 | "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}", 47 | "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages", 48 | "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers", 49 | "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors", 50 | "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers", 51 | "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription", 52 | "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}", 53 | "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}", 54 | "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}", 55 | "issue_comment_url": "https://api.example.com/repos/{{ 
namespace }}/myproject4/issues/comments{/number}", 56 | "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}", 57 | "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}", 58 | "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges", 59 | "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}", 60 | "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads", 61 | "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}", 62 | "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}", 63 | "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}", 64 | "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}", 65 | "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}", 66 | "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}", 67 | "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments", 68 | "created_at": "2016-01-07T22:27:54Z", 69 | "updated_at": "2021-11-20T16:15:37Z", 70 | "pushed_at": "2021-11-20T16:15:34Z", 71 | "git_url": "git://example.com/{{ namespace }}/myproject4.git", 72 | "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git", 73 | "clone_url": "https://example.com/{{ namespace }}/myproject4.git", 74 | "svn_url": "https://example.com/{{ namespace }}/myproject4", 75 | "homepage": null, 76 | "size": 12, 77 | "stargazers_count": 0, 78 | "watchers_count": 0, 79 | "language": "Shell", 80 | "has_issues": true, 81 | "has_projects": true, 82 | "has_downloads": true, 83 | "has_wiki": true, 84 | "has_pages": false, 85 | "forks_count": 0, 86 | "mirror_url": null, 87 | "archived": false, 88 | "disabled": false, 89 | "open_issues_count": 0, 90 | "license": { 91 | "key": "apache-2.0", 92 | "name": "Apache License 2.0", 93 | "spdx_id": "Apache-2.0", 94 | "url": "https://api.example.com/licenses/apache-2.0", 95 | "node_id": "MDc6TGljZW5zZTI=" 96 | }, 97 | "allow_forking": true, 98 | "is_template": false, 99 | "topics": [ 100 | 101 | ], 102 | "visibility": "public", 103 | "forks": 0, 104 | "open_issues": 0, 105 | "watchers": 0, 106 | "default_branch": "master", 107 | "permissions": { 108 | "admin": true, 109 | "maintain": true, 110 | "push": true, 111 | "triage": true, 112 | "pull": true 113 | } 114 | } 115 | ] 116 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/github_api_page_4.json.j2: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 3, 4 | "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", 5 | "name": "myproject5", 6 | "full_name": "{{ namespace }}/myproject5", 7 | "private": false, 8 | "owner": { 9 | "login": "someuser", 10 | "id": 1, 11 | "node_id": "MDQ6VXNlcjM3NDg2OTY=", 12 | "avatar_url": "https://example.com/u/3748696?v=4", 13 | "gravatar_id": "", 14 | "url": "https://api.example.com/users/{{ namespace }}", 15 | "html_url": "https://example.com/{{ namespace }}", 16 | "followers_url": "https://api.example.com/users/{{ namespace }}/followers", 17 | "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", 18 | "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", 19 | 
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", 20 | "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", 21 | "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", 22 | "repos_url": "https://api.example.com/users/{{ namespace }}/repos", 23 | "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", 24 | "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", 25 | "type": "User", 26 | "site_admin": false 27 | }, 28 | "html_url": "https://example.com/{{ namespace }}/myproject5", 29 | "description": "Shell script for automatically building ACI containers from scratch using acbuild.", 30 | "fork": false, 31 | "url": "https://api.example.com/repos/{{ namespace }}/myproject5", 32 | "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks", 33 | "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}", 34 | "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}", 35 | "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams", 36 | "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks", 37 | "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}", 38 | "events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events", 39 | "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}", 40 | "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}", 41 | "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags", 42 | "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}", 43 | "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}", 44 | "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}", 45 | "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}", 46 | "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}", 47 | "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject5/languages", 48 | "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers", 49 | "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors", 50 | "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers", 51 | "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription", 52 | "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}", 53 | "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}", 54 | "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}", 55 | "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}", 56 | "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}", 57 | "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}", 58 | "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges", 59 | "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}", 60 | "downloads_url": 
"https://api.example.com/repos/{{ namespace }}/myproject5/downloads", 61 | "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}", 62 | "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}", 63 | "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}", 64 | "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}", 65 | "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}", 66 | "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}", 67 | "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments", 68 | "created_at": "2016-01-07T22:27:54Z", 69 | "updated_at": "2021-11-20T16:15:37Z", 70 | "pushed_at": "2021-11-20T16:15:34Z", 71 | "git_url": "git://example.com/{{ namespace }}/myproject5.git", 72 | "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git", 73 | "clone_url": "https://example.com/{{ namespace }}/myproject5.git", 74 | "svn_url": "https://example.com/{{ namespace }}/myproject5", 75 | "homepage": null, 76 | "size": 12, 77 | "stargazers_count": 0, 78 | "watchers_count": 0, 79 | "language": "Shell", 80 | "has_issues": true, 81 | "has_projects": true, 82 | "has_downloads": true, 83 | "has_wiki": true, 84 | "has_pages": false, 85 | "forks_count": 0, 86 | "mirror_url": null, 87 | "archived": false, 88 | "disabled": false, 89 | "open_issues_count": 0, 90 | "license": { 91 | "key": "apache-2.0", 92 | "name": "Apache License 2.0", 93 | "spdx_id": "Apache-2.0", 94 | "url": "https://api.example.com/licenses/apache-2.0", 95 | "node_id": "MDc6TGljZW5zZTI=" 96 | }, 97 | "allow_forking": true, 98 | "is_template": false, 99 | "topics": [ 100 | 101 | ], 102 | "visibility": "public", 103 | "forks": 0, 104 | "open_issues": 0, 105 | "watchers": 0, 106 | "default_branch": "master", 107 | "permissions": { 108 | "admin": true, 109 | "maintain": true, 110 | "push": true, 111 | "triage": true, 112 | "pull": true 113 | } 114 | } 115 | ] 116 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/github_api_user.json: -------------------------------------------------------------------------------- 1 | { 2 | "login": "myuser1", 3 | "id": 1, 4 | "node_id": "MDQ6VXNlcjE=", 5 | "avatar_url": "https://example.com/images/error/octocat_happy.gif", 6 | "gravatar_id": "", 7 | "url": "https://api.example.com/users/octocat", 8 | "html_url": "https://example.com/octocat", 9 | "followers_url": "https://api.example.com/users/octocat/followers", 10 | "following_url": "https://api.example.com/users/octocat/following{/other_user}", 11 | "gists_url": "https://api.example.com/users/octocat/gists{/gist_id}", 12 | "starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}", 13 | "subscriptions_url": "https://api.example.com/users/octocat/subscriptions", 14 | "organizations_url": "https://api.example.com/users/octocat/orgs", 15 | "repos_url": "https://api.example.com/users/octocat/repos", 16 | "events_url": "https://api.example.com/users/octocat/events{/privacy}", 17 | "received_events_url": "https://api.example.com/users/octocat/received_events", 18 | "type": "User", 19 | "site_admin": false, 20 | "name": "monalisa octocat", 21 | "company": "GitHub", 22 | "blog": "https://example.com/blog", 23 | "location": "San Francisco", 24 | "email": "octocat@example.com", 25 | "hireable": false, 
26 | "bio": "There once was...", 27 | "twitter_username": "monatheoctocat", 28 | "public_repos": 2, 29 | "public_gists": 1, 30 | "followers": 20, 31 | "following": 0, 32 | "created_at": "2008-01-14T04:33:35Z", 33 | "updated_at": "2008-01-14T04:33:35Z", 34 | "private_gists": 81, 35 | "total_private_repos": 100, 36 | "owned_private_repos": 100, 37 | "disk_usage": 10000, 38 | "collaborators": 8, 39 | "two_factor_authentication": true, 40 | "plan": { 41 | "name": "Medium", 42 | "space": 400, 43 | "private_repos": 20, 44 | "collaborators": 0 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | 3 | import jinja2 4 | from app import app 5 | from flask import abort, jsonify, make_response, request 6 | 7 | 8 | def check_headers(): 9 | if request.headers.get("accept") != "application/json": 10 | app.logger.error("Invalid accept header") 11 | abort(500) 12 | auth_header = request.headers.get("authorization") 13 | if auth_header != "bearer secret-token:myauthtoken": 14 | app.logger.error("Invalid authorization header: %s", auth_header) 15 | abort( 16 | make_response( 17 | jsonify( 18 | { 19 | "message": "Bad credentials", 20 | "documentation_url": "https://docs.example.com/rest", 21 | } 22 | ), 23 | 401, 24 | ) 25 | ) 26 | 27 | 28 | def add_pagination(response, page, last_page): 29 | host = request.headers["host"] 30 | link_header = "" 31 | 32 | def args(page): 33 | args = request.args.copy() 34 | args["page"] = page 35 | return "&".join([f"{k}={v}" for k, v in args.items()]) 36 | 37 | if page < last_page: 38 | link_header += ( 39 | f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", ' 40 | ) 41 | link_header += ( 42 | f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"' 43 | ) 44 | response.headers["link"] = link_header 45 | 46 | 47 | def read_project_files(namespaces=[]): 48 | last_page = 4 49 | page = int(request.args.get("page", "1")) 50 | response_file = f"./gitlab_api_page_{page}.json" 51 | if not os.path.exists(response_file): 52 | return jsonify([]) 53 | 54 | response = make_response( 55 | jinja2.Template(open(response_file).read()).render( 56 | namespace=namespaces[page - 1] 57 | ) 58 | ) 59 | add_pagination(response, page, last_page) 60 | response.headers["content-type"] = "application/json" 61 | return response 62 | 63 | 64 | def single_namespaced_projects(namespace): 65 | return read_project_files([namespace] * 4) 66 | 67 | 68 | def mixed_projects(namespaces): 69 | return read_project_files(namespaces) 70 | 71 | 72 | @app.route("/gitlab/api/v4/users//projects") 73 | def gitlab_user_repos(user): 74 | check_headers() 75 | if user == "myuser1": 76 | return single_namespaced_projects("myuser1") 77 | return jsonify([]) 78 | 79 | 80 | @app.route("/gitlab/api/v4/groups//projects") 81 | def gitlab_group_repos(group): 82 | check_headers() 83 | if not ( 84 | request.args.get("include_subgroups") == "true" 85 | and request.args.get("archived") == "false" 86 | ): 87 | abort(500, "wrong arguments") 88 | if group == "mygroup1": 89 | return single_namespaced_projects("mygroup1") 90 | return jsonify([]) 91 | 92 | 93 | @app.route("/gitlab/api/v4/projects/") 94 | def gitlab_own_repos(): 95 | check_headers() 96 | return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"]) 97 | 98 | 99 | @app.route("/gitlab/api/v4/user/") 100 | def gitlab_user(): 101 | check_headers() 
102 | response = make_response(open("./gitlab_api_user.json").read()) 103 | response.headers["content-type"] = "application/json" 104 | return response 105 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab_api_page_1.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 1, 4 | "description": "", 5 | "name": "myproject1", 6 | "name_with_namespace": "{{ namespace }} / myproject1", 7 | "path": "myproject1", 8 | "path_with_namespace": "{{ namespace }}/myproject1", 9 | "created_at": "2020-11-26T17:23:39.904Z", 10 | "default_branch": "master", 11 | "tag_list": [], 12 | "topics": [], 13 | "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git", 14 | "http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git", 15 | "web_url": "https://example.com/{{ namespace }}/myproject1", 16 | "readme_url": null, 17 | "avatar_url": null, 18 | "forks_count": 0, 19 | "star_count": 0, 20 | "last_activity_at": "2020-11-26T17:23:39.904Z", 21 | "namespace": { 22 | "id": 3, 23 | "name": "{{ namespace }}", 24 | "path": "{{ namespace }}", 25 | "kind": "group", 26 | "full_path": "{{ namespace }}", 27 | "parent_id": null, 28 | "avatar_url": "/uploads/-/system/group/avatar/5/x.png", 29 | "web_url": "https://example.com/groups/{{ namespace }}" 30 | }, 31 | "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1", 32 | "_links": { 33 | "self": "https://example.com/api/v4/projects/2", 34 | "issues": "https://example.com/api/v4/projects/2/issues", 35 | "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", 36 | "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", 37 | "labels": "https://example.com/api/v4/projects/2/labels", 38 | "events": "https://example.com/api/v4/projects/2/events", 39 | "members": "https://example.com/api/v4/projects/2/members", 40 | "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" 41 | }, 42 | "packages_enabled": true, 43 | "empty_repo": false, 44 | "archived": false, 45 | "visibility": "private", 46 | "resolve_outdated_diff_discussions": false, 47 | "container_expiration_policy": { 48 | "cadence": "1d", 49 | "enabled": false, 50 | "keep_n": 10, 51 | "older_than": "90d", 52 | "name_regex": ".*", 53 | "name_regex_keep": null, 54 | "next_run_at": "2020-11-27T17:23:39.927Z" 55 | }, 56 | "issues_enabled": true, 57 | "merge_requests_enabled": true, 58 | "wiki_enabled": true, 59 | "jobs_enabled": true, 60 | "snippets_enabled": true, 61 | "container_registry_enabled": true, 62 | "service_desk_enabled": true, 63 | "service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com", 64 | "can_create_merge_request_in": true, 65 | "issues_access_level": "enabled", 66 | "repository_access_level": "enabled", 67 | "merge_requests_access_level": "enabled", 68 | "forking_access_level": "enabled", 69 | "wiki_access_level": "enabled", 70 | "builds_access_level": "enabled", 71 | "snippets_access_level": "enabled", 72 | "pages_access_level": "private", 73 | "operations_access_level": "enabled", 74 | "analytics_access_level": "enabled", 75 | "container_registry_access_level": "enabled", 76 | "security_and_compliance_access_level": "private", 77 | "emails_disabled": null, 78 | "shared_runners_enabled": true, 79 | "lfs_enabled": true, 80 | "creator_id": 1803951, 81 | "import_url": null, 82 | "import_type": null, 83 | "import_status": "none", 84 | "open_issues_count": 0, 
85 | "ci_default_git_depth": 50, 86 | "ci_forward_deployment_enabled": true, 87 | "ci_job_token_scope_enabled": false, 88 | "ci_separated_caches": true, 89 | "public_jobs": true, 90 | "build_timeout": 3600, 91 | "auto_cancel_pending_pipelines": "enabled", 92 | "build_coverage_regex": null, 93 | "ci_config_path": "", 94 | "shared_with_groups": [], 95 | "only_allow_merge_if_pipeline_succeeds": false, 96 | "allow_merge_on_skipped_pipeline": null, 97 | "restrict_user_defined_variables": false, 98 | "request_access_enabled": true, 99 | "only_allow_merge_if_all_discussions_are_resolved": false, 100 | "remove_source_branch_after_merge": true, 101 | "printing_merge_request_link_enabled": true, 102 | "merge_method": "merge", 103 | "squash_option": "default_off", 104 | "enforce_auth_checks_on_uploads": true, 105 | "suggestion_commit_message": null, 106 | "merge_commit_template": null, 107 | "squash_commit_template": null, 108 | "auto_devops_enabled": false, 109 | "auto_devops_deploy_strategy": "continuous", 110 | "autoclose_referenced_issues": true, 111 | "keep_latest_artifact": true, 112 | "runner_token_expiration_interval": null, 113 | "external_authorization_classification_label": "", 114 | "requirements_enabled": false, 115 | "requirements_access_level": "enabled", 116 | "security_and_compliance_enabled": true, 117 | "compliance_frameworks": [] 118 | }, 119 | { 120 | "id": 2, 121 | "description": "", 122 | "name": "myproject2", 123 | "name_with_namespace": "{{ namespace }} / myproject2", 124 | "path": "myproject2", 125 | "path_with_namespace": "{{ namespace }}/myproject2", 126 | "created_at": "2020-11-26T17:23:39.904Z", 127 | "default_branch": "master", 128 | "tag_list": [], 129 | "topics": [], 130 | "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git", 131 | "http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git", 132 | "web_url": "https://example.com/{{ namespace }}/myproject2", 133 | "readme_url": null, 134 | "avatar_url": null, 135 | "forks_count": 0, 136 | "star_count": 0, 137 | "last_activity_at": "2020-11-26T17:23:39.904Z", 138 | "namespace": { 139 | "id": 3, 140 | "name": "{{ namespace }}", 141 | "path": "{{ namespace }}", 142 | "kind": "group", 143 | "full_path": "{{ namespace }}", 144 | "parent_id": null, 145 | "avatar_url": "/uploads/-/system/group/avatar/5/x.png", 146 | "web_url": "https://example.com/groups/{{ namespace }}" 147 | }, 148 | "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2", 149 | "_links": { 150 | "self": "https://example.com/api/v4/projects/2", 151 | "issues": "https://example.com/api/v4/projects/2/issues", 152 | "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", 153 | "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", 154 | "labels": "https://example.com/api/v4/projects/2/labels", 155 | "events": "https://example.com/api/v4/projects/2/events", 156 | "members": "https://example.com/api/v4/projects/2/members", 157 | "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" 158 | }, 159 | "packages_enabled": true, 160 | "empty_repo": false, 161 | "archived": false, 162 | "visibility": "public", 163 | "resolve_outdated_diff_discussions": false, 164 | "container_expiration_policy": { 165 | "cadence": "1d", 166 | "enabled": false, 167 | "keep_n": 10, 168 | "older_than": "90d", 169 | "name_regex": ".*", 170 | "name_regex_keep": null, 171 | "next_run_at": "2020-11-27T17:23:39.927Z" 172 | }, 173 | "issues_enabled": true, 174 | 
"merge_requests_enabled": true, 175 | "wiki_enabled": true, 176 | "jobs_enabled": true, 177 | "snippets_enabled": true, 178 | "container_registry_enabled": true, 179 | "service_desk_enabled": true, 180 | "service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com", 181 | "can_create_merge_request_in": true, 182 | "issues_access_level": "enabled", 183 | "repository_access_level": "enabled", 184 | "merge_requests_access_level": "enabled", 185 | "forking_access_level": "enabled", 186 | "wiki_access_level": "enabled", 187 | "builds_access_level": "enabled", 188 | "snippets_access_level": "enabled", 189 | "pages_access_level": "private", 190 | "operations_access_level": "enabled", 191 | "analytics_access_level": "enabled", 192 | "container_registry_access_level": "enabled", 193 | "security_and_compliance_access_level": "private", 194 | "emails_disabled": null, 195 | "shared_runners_enabled": true, 196 | "lfs_enabled": true, 197 | "creator_id": 1803951, 198 | "import_url": null, 199 | "import_type": null, 200 | "import_status": "none", 201 | "open_issues_count": 0, 202 | "ci_default_git_depth": 50, 203 | "ci_forward_deployment_enabled": true, 204 | "ci_job_token_scope_enabled": false, 205 | "ci_separated_caches": true, 206 | "public_jobs": true, 207 | "build_timeout": 3600, 208 | "auto_cancel_pending_pipelines": "enabled", 209 | "build_coverage_regex": null, 210 | "ci_config_path": "", 211 | "shared_with_groups": [], 212 | "only_allow_merge_if_pipeline_succeeds": false, 213 | "allow_merge_on_skipped_pipeline": null, 214 | "restrict_user_defined_variables": false, 215 | "request_access_enabled": true, 216 | "only_allow_merge_if_all_discussions_are_resolved": false, 217 | "remove_source_branch_after_merge": true, 218 | "printing_merge_request_link_enabled": true, 219 | "merge_method": "merge", 220 | "squash_option": "default_off", 221 | "enforce_auth_checks_on_uploads": true, 222 | "suggestion_commit_message": null, 223 | "merge_commit_template": null, 224 | "squash_commit_template": null, 225 | "auto_devops_enabled": false, 226 | "auto_devops_deploy_strategy": "continuous", 227 | "autoclose_referenced_issues": true, 228 | "keep_latest_artifact": true, 229 | "runner_token_expiration_interval": null, 230 | "external_authorization_classification_label": "", 231 | "requirements_enabled": false, 232 | "requirements_access_level": "enabled", 233 | "security_and_compliance_enabled": true, 234 | "compliance_frameworks": [] 235 | } 236 | ] 237 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab_api_page_2.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 3, 4 | "description": "", 5 | "name": "myproject3", 6 | "name_with_namespace": "{{ namespace }} / myproject3", 7 | "path": "myproject3", 8 | "path_with_namespace": "{{ namespace }}/myproject3", 9 | "created_at": "2020-11-26T17:23:39.904Z", 10 | "default_branch": "master", 11 | "tag_list": [], 12 | "topics": [], 13 | "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git", 14 | "http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git", 15 | "web_url": "https://example.com/{{ namespace }}/myproject3", 16 | "readme_url": null, 17 | "avatar_url": null, 18 | "forks_count": 0, 19 | "star_count": 0, 20 | "last_activity_at": "2020-11-26T17:23:39.904Z", 21 | "namespace": { 22 | "id": 3, 23 | "name": "{{ namespace }}", 24 | "path": "{{ namespace }}", 25 | "kind": "group", 26 | "full_path": 
"{{ namespace }}", 27 | "parent_id": null, 28 | "avatar_url": "/uploads/-/system/group/avatar/5/x.png", 29 | "web_url": "https://example.com/groups/{{ namespace }}" 30 | }, 31 | "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3", 32 | "_links": { 33 | "self": "https://example.com/api/v4/projects/2", 34 | "issues": "https://example.com/api/v4/projects/2/issues", 35 | "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", 36 | "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", 37 | "labels": "https://example.com/api/v4/projects/2/labels", 38 | "events": "https://example.com/api/v4/projects/2/events", 39 | "members": "https://example.com/api/v4/projects/2/members", 40 | "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" 41 | }, 42 | "packages_enabled": true, 43 | "empty_repo": false, 44 | "archived": false, 45 | "visibility": "public", 46 | "resolve_outdated_diff_discussions": false, 47 | "container_expiration_policy": { 48 | "cadence": "1d", 49 | "enabled": false, 50 | "keep_n": 10, 51 | "older_than": "90d", 52 | "name_regex": ".*", 53 | "name_regex_keep": null, 54 | "next_run_at": "2020-11-27T17:23:39.927Z" 55 | }, 56 | "issues_enabled": true, 57 | "merge_requests_enabled": true, 58 | "wiki_enabled": true, 59 | "jobs_enabled": true, 60 | "snippets_enabled": true, 61 | "container_registry_enabled": true, 62 | "service_desk_enabled": true, 63 | "service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com", 64 | "can_create_merge_request_in": true, 65 | "issues_access_level": "enabled", 66 | "repository_access_level": "enabled", 67 | "merge_requests_access_level": "enabled", 68 | "forking_access_level": "enabled", 69 | "wiki_access_level": "enabled", 70 | "builds_access_level": "enabled", 71 | "snippets_access_level": "enabled", 72 | "pages_access_level": "private", 73 | "operations_access_level": "enabled", 74 | "analytics_access_level": "enabled", 75 | "container_registry_access_level": "enabled", 76 | "security_and_compliance_access_level": "private", 77 | "emails_disabled": null, 78 | "shared_runners_enabled": true, 79 | "lfs_enabled": true, 80 | "creator_id": 1803951, 81 | "import_url": null, 82 | "import_type": null, 83 | "import_status": "none", 84 | "open_issues_count": 0, 85 | "ci_default_git_depth": 50, 86 | "ci_forward_deployment_enabled": true, 87 | "ci_job_token_scope_enabled": false, 88 | "ci_separated_caches": true, 89 | "public_jobs": true, 90 | "build_timeout": 3600, 91 | "auto_cancel_pending_pipelines": "enabled", 92 | "build_coverage_regex": null, 93 | "ci_config_path": "", 94 | "shared_with_groups": [], 95 | "only_allow_merge_if_pipeline_succeeds": false, 96 | "allow_merge_on_skipped_pipeline": null, 97 | "restrict_user_defined_variables": false, 98 | "request_access_enabled": true, 99 | "only_allow_merge_if_all_discussions_are_resolved": false, 100 | "remove_source_branch_after_merge": true, 101 | "printing_merge_request_link_enabled": true, 102 | "merge_method": "merge", 103 | "squash_option": "default_off", 104 | "enforce_auth_checks_on_uploads": true, 105 | "suggestion_commit_message": null, 106 | "merge_commit_template": null, 107 | "squash_commit_template": null, 108 | "auto_devops_enabled": false, 109 | "auto_devops_deploy_strategy": "continuous", 110 | "autoclose_referenced_issues": true, 111 | "keep_latest_artifact": true, 112 | "runner_token_expiration_interval": null, 113 | "external_authorization_classification_label": "", 114 | 
"requirements_enabled": false, 115 | "requirements_access_level": "enabled", 116 | "security_and_compliance_enabled": true, 117 | "compliance_frameworks": [] 118 | } 119 | ] 120 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab_api_page_3.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 4, 4 | "description": "", 5 | "name": "myproject4", 6 | "name_with_namespace": "{{ namespace }} / myproject4", 7 | "path": "myproject4", 8 | "path_with_namespace": "{{ namespace }}/myproject4", 9 | "created_at": "2020-11-26T17:23:39.904Z", 10 | "default_branch": "master", 11 | "tag_list": [], 12 | "topics": [], 13 | "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git", 14 | "http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git", 15 | "web_url": "https://example.com/{{ namespace }}/myproject4", 16 | "readme_url": null, 17 | "avatar_url": null, 18 | "forks_count": 0, 19 | "star_count": 0, 20 | "last_activity_at": "2020-11-26T17:23:39.904Z", 21 | "namespace": { 22 | "id": 3, 23 | "name": "{{ namespace }}", 24 | "path": "{{ namespace }}", 25 | "kind": "group", 26 | "full_path": "{{ namespace }}", 27 | "parent_id": null, 28 | "avatar_url": "/uploads/-/system/group/avatar/5/x.png", 29 | "web_url": "https://example.com/groups/{{ namespace }}" 30 | }, 31 | "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4", 32 | "_links": { 33 | "self": "https://example.com/api/v4/projects/2", 34 | "issues": "https://example.com/api/v4/projects/2/issues", 35 | "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", 36 | "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", 37 | "labels": "https://example.com/api/v4/projects/2/labels", 38 | "events": "https://example.com/api/v4/projects/2/events", 39 | "members": "https://example.com/api/v4/projects/2/members", 40 | "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" 41 | }, 42 | "packages_enabled": true, 43 | "empty_repo": false, 44 | "archived": false, 45 | "visibility": "public", 46 | "resolve_outdated_diff_discussions": false, 47 | "container_expiration_policy": { 48 | "cadence": "1d", 49 | "enabled": false, 50 | "keep_n": 10, 51 | "older_than": "90d", 52 | "name_regex": ".*", 53 | "name_regex_keep": null, 54 | "next_run_at": "2020-11-27T17:23:39.927Z" 55 | }, 56 | "issues_enabled": true, 57 | "merge_requests_enabled": true, 58 | "wiki_enabled": true, 59 | "jobs_enabled": true, 60 | "snippets_enabled": true, 61 | "container_registry_enabled": true, 62 | "service_desk_enabled": true, 63 | "service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com", 64 | "can_create_merge_request_in": true, 65 | "issues_access_level": "enabled", 66 | "repository_access_level": "enabled", 67 | "merge_requests_access_level": "enabled", 68 | "forking_access_level": "enabled", 69 | "wiki_access_level": "enabled", 70 | "builds_access_level": "enabled", 71 | "snippets_access_level": "enabled", 72 | "pages_access_level": "private", 73 | "operations_access_level": "enabled", 74 | "analytics_access_level": "enabled", 75 | "container_registry_access_level": "enabled", 76 | "security_and_compliance_access_level": "private", 77 | "emails_disabled": null, 78 | "shared_runners_enabled": true, 79 | "lfs_enabled": true, 80 | "creator_id": 1803951, 81 | "import_url": null, 82 | "import_type": null, 83 | "import_status": "none", 84 | 
"open_issues_count": 0, 85 | "ci_default_git_depth": 50, 86 | "ci_forward_deployment_enabled": true, 87 | "ci_job_token_scope_enabled": false, 88 | "ci_separated_caches": true, 89 | "public_jobs": true, 90 | "build_timeout": 3600, 91 | "auto_cancel_pending_pipelines": "enabled", 92 | "build_coverage_regex": null, 93 | "ci_config_path": "", 94 | "shared_with_groups": [], 95 | "only_allow_merge_if_pipeline_succeeds": false, 96 | "allow_merge_on_skipped_pipeline": null, 97 | "restrict_user_defined_variables": false, 98 | "request_access_enabled": true, 99 | "only_allow_merge_if_all_discussions_are_resolved": false, 100 | "remove_source_branch_after_merge": true, 101 | "printing_merge_request_link_enabled": true, 102 | "merge_method": "merge", 103 | "squash_option": "default_off", 104 | "enforce_auth_checks_on_uploads": true, 105 | "suggestion_commit_message": null, 106 | "merge_commit_template": null, 107 | "squash_commit_template": null, 108 | "auto_devops_enabled": false, 109 | "auto_devops_deploy_strategy": "continuous", 110 | "autoclose_referenced_issues": true, 111 | "keep_latest_artifact": true, 112 | "runner_token_expiration_interval": null, 113 | "external_authorization_classification_label": "", 114 | "requirements_enabled": false, 115 | "requirements_access_level": "enabled", 116 | "security_and_compliance_enabled": true, 117 | "compliance_frameworks": [] 118 | } 119 | ] 120 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab_api_page_4.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 5, 4 | "description": "", 5 | "name": "myproject5", 6 | "name_with_namespace": "{{ namespace }} / myproject5", 7 | "path": "myproject5", 8 | "path_with_namespace": "{{ namespace }}/myproject5", 9 | "created_at": "2020-11-26T17:23:39.904Z", 10 | "default_branch": "master", 11 | "tag_list": [], 12 | "topics": [], 13 | "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git", 14 | "http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git", 15 | "web_url": "https://example.com/{{ namespace }}/myproject5", 16 | "readme_url": null, 17 | "avatar_url": null, 18 | "forks_count": 0, 19 | "star_count": 0, 20 | "last_activity_at": "2020-11-26T17:23:39.904Z", 21 | "namespace": { 22 | "id": 3, 23 | "name": "{{ namespace }}", 24 | "path": "{{ namespace }}", 25 | "kind": "group", 26 | "full_path": "{{ namespace }}", 27 | "parent_id": null, 28 | "avatar_url": "/uploads/-/system/group/avatar/5/x.png", 29 | "web_url": "https://example.com/groups/{{ namespace }}" 30 | }, 31 | "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5", 32 | "_links": { 33 | "self": "https://example.com/api/v4/projects/2", 34 | "issues": "https://example.com/api/v4/projects/2/issues", 35 | "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", 36 | "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", 37 | "labels": "https://example.com/api/v4/projects/2/labels", 38 | "events": "https://example.com/api/v4/projects/2/events", 39 | "members": "https://example.com/api/v4/projects/2/members", 40 | "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" 41 | }, 42 | "packages_enabled": true, 43 | "empty_repo": false, 44 | "archived": false, 45 | "visibility": "public", 46 | "resolve_outdated_diff_discussions": false, 47 | "container_expiration_policy": { 48 | "cadence": "1d", 49 | "enabled": false, 50 | 
"keep_n": 10, 51 | "older_than": "90d", 52 | "name_regex": ".*", 53 | "name_regex_keep": null, 54 | "next_run_at": "2020-11-27T17:23:39.927Z" 55 | }, 56 | "issues_enabled": true, 57 | "merge_requests_enabled": true, 58 | "wiki_enabled": true, 59 | "jobs_enabled": true, 60 | "snippets_enabled": true, 61 | "container_registry_enabled": true, 62 | "service_desk_enabled": true, 63 | "service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com", 64 | "can_create_merge_request_in": true, 65 | "issues_access_level": "enabled", 66 | "repository_access_level": "enabled", 67 | "merge_requests_access_level": "enabled", 68 | "forking_access_level": "enabled", 69 | "wiki_access_level": "enabled", 70 | "builds_access_level": "enabled", 71 | "snippets_access_level": "enabled", 72 | "pages_access_level": "private", 73 | "operations_access_level": "enabled", 74 | "analytics_access_level": "enabled", 75 | "container_registry_access_level": "enabled", 76 | "security_and_compliance_access_level": "private", 77 | "emails_disabled": null, 78 | "shared_runners_enabled": true, 79 | "lfs_enabled": true, 80 | "creator_id": 1803951, 81 | "import_url": null, 82 | "import_type": null, 83 | "import_status": "none", 84 | "open_issues_count": 0, 85 | "ci_default_git_depth": 50, 86 | "ci_forward_deployment_enabled": true, 87 | "ci_job_token_scope_enabled": false, 88 | "ci_separated_caches": true, 89 | "public_jobs": true, 90 | "build_timeout": 3600, 91 | "auto_cancel_pending_pipelines": "enabled", 92 | "build_coverage_regex": null, 93 | "ci_config_path": "", 94 | "shared_with_groups": [], 95 | "only_allow_merge_if_pipeline_succeeds": false, 96 | "allow_merge_on_skipped_pipeline": null, 97 | "restrict_user_defined_variables": false, 98 | "request_access_enabled": true, 99 | "only_allow_merge_if_all_discussions_are_resolved": false, 100 | "remove_source_branch_after_merge": true, 101 | "printing_merge_request_link_enabled": true, 102 | "merge_method": "merge", 103 | "squash_option": "default_off", 104 | "enforce_auth_checks_on_uploads": true, 105 | "suggestion_commit_message": null, 106 | "merge_commit_template": null, 107 | "squash_commit_template": null, 108 | "auto_devops_enabled": false, 109 | "auto_devops_deploy_strategy": "continuous", 110 | "autoclose_referenced_issues": true, 111 | "keep_latest_artifact": true, 112 | "runner_token_expiration_interval": null, 113 | "external_authorization_classification_label": "", 114 | "requirements_enabled": false, 115 | "requirements_access_level": "enabled", 116 | "security_and_compliance_enabled": true, 117 | "compliance_frameworks": [] 118 | } 119 | ] 120 | -------------------------------------------------------------------------------- /e2e_tests/docker-rest/flask/gitlab_api_user.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 1, 3 | "username": "myuser1", 4 | "name": "My User", 5 | "state": "active", 6 | "avatar_url": "https://example.com/avatar", 7 | "web_url": "https://example.com/myuser1", 8 | "created_at": "2016-12-10T10:09:11.585Z", 9 | "bio": "", 10 | "location": "", 11 | "public_email": "", 12 | "skype": "", 13 | "linkedin": "", 14 | "twitter": "", 15 | "website_url": "", 16 | "organization": "", 17 | "job_title": "", 18 | "pronouns": "", 19 | "bot": false, 20 | "work_information": null, 21 | "followers": 0, 22 | "following": 0, 23 | "is_followed": false, 24 | "local_time": "11:59 PM", 25 | "last_sign_in_at": "2020-03-14T09:13:44.977Z", 26 | "confirmed_at": "2022-05-19T23:48:47.033Z", 27 | 
"last_activity_on": "2022-05-19", 28 | "email": "myuser1@example.com", 29 | "theme_id": null, 30 | "color_scheme_id": 1, 31 | "projects_limit": 100000, 32 | "current_sign_in_at": "2022-05-19T23:45:49.661Z", 33 | "identities": [], 34 | "can_create_group": true, 35 | "can_create_project": true, 36 | "two_factor_enabled": false, 37 | "external": false, 38 | "private_profile": false, 39 | "commit_email": "myuser1@example.com", 40 | "shared_runners_minutes_limit": 2000, 41 | "extra_shared_runners_minutes_limit": null 42 | } 43 | -------------------------------------------------------------------------------- /e2e_tests/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/debian:11.3 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y --no-install-recommends \ 5 | python3-pytest \ 6 | python3-toml \ 7 | python3-git \ 8 | python3-yaml \ 9 | && apt-get clean \ 10 | && rm -rf /var/lib/apt/lists/* 11 | 12 | WORKDIR /tests 13 | 14 | ENTRYPOINT ["/bin/sh", "-c", "--"] 15 | -------------------------------------------------------------------------------- /e2e_tests/test_basic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from helpers import grm 4 | 5 | 6 | def test_invalid_command(): 7 | cmd = grm(["whatever"], is_invalid=True) 8 | assert "usage" in cmd.stderr.lower() 9 | 10 | 11 | def test_help(): 12 | cmd = grm(["--help"]) 13 | assert "usage" in cmd.stdout.lower() 14 | -------------------------------------------------------------------------------- /e2e_tests/test_repos_status.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from helpers import RepoTree, grm 4 | 5 | 6 | def test_repos_sync_worktree_clone(): 7 | with RepoTree() as (root, config, repos): 8 | cmd = grm(["repos", "status", "--config", config]) 9 | assert cmd.returncode == 0 10 | for repo in repos: 11 | assert repo in cmd.stdout 12 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_clean.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | 5 | import pytest 6 | from helpers import ( 7 | NonGitDir, 8 | TempGitRepository, 9 | TempGitRepositoryWorktree, 10 | checksum_directory, 11 | funcname, 12 | grm, 13 | shell, 14 | ) 15 | 16 | 17 | def test_worktree_clean(): 18 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 19 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 20 | assert cmd.returncode == 0 21 | assert "test" in os.listdir(base_dir) 22 | 23 | cmd = grm(["wt", "clean"], cwd=base_dir) 24 | assert cmd.returncode == 0 25 | assert "test" not in os.listdir(base_dir) 26 | 27 | 28 | def test_worktree_clean_refusal_no_tracking_branch(): 29 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 30 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 31 | assert cmd.returncode == 0 32 | 33 | before = checksum_directory(f"{base_dir}/test") 34 | cmd = grm(["wt", "clean"], cwd=base_dir) 35 | assert cmd.returncode == 0 36 | assert "test" in os.listdir(base_dir) 37 | 38 | after = checksum_directory(f"{base_dir}/test") 39 | assert before == after 40 | 41 | 42 | def test_worktree_clean_refusal_uncommited_changes_new_file(): 43 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 44 | cmd = grm(["wt", "add", "test", "--track", 
"origin/test"], cwd=base_dir) 45 | assert cmd.returncode == 0 46 | 47 | shell(f"cd {base_dir}/test && touch changed_file") 48 | 49 | before = checksum_directory(f"{base_dir}/test") 50 | cmd = grm(["wt", "clean"], cwd=base_dir) 51 | assert cmd.returncode == 0 52 | assert "test" in os.listdir(base_dir) 53 | 54 | after = checksum_directory(f"{base_dir}/test") 55 | assert before == after 56 | 57 | 58 | def test_worktree_clean_refusal_uncommited_changes_changed_file(): 59 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 60 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 61 | assert cmd.returncode == 0 62 | 63 | shell(f"cd {base_dir}/test && git ls-files | shuf | head | xargs rm -rf") 64 | 65 | before = checksum_directory(f"{base_dir}/test") 66 | cmd = grm(["wt", "clean"], cwd=base_dir) 67 | assert cmd.returncode == 0 68 | assert "test" in os.listdir(base_dir) 69 | 70 | after = checksum_directory(f"{base_dir}/test") 71 | assert before == after 72 | 73 | 74 | def test_worktree_clean_refusal_uncommited_changes_cleand_file(): 75 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 76 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 77 | assert cmd.returncode == 0 78 | 79 | shell( 80 | f"cd {base_dir}/test && git ls-files | shuf | head | while read f ; do echo $RANDOM > $f ; done" 81 | ) 82 | 83 | before = checksum_directory(f"{base_dir}/test") 84 | cmd = grm(["wt", "clean"], cwd=base_dir) 85 | assert cmd.returncode == 0 86 | assert "test" in os.listdir(base_dir) 87 | 88 | after = checksum_directory(f"{base_dir}/test") 89 | assert before == after 90 | 91 | 92 | def test_worktree_clean_refusal_commited_changes(): 93 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 94 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 95 | assert cmd.returncode == 0 96 | 97 | shell( 98 | f'cd {base_dir}/test && touch changed_file && git add changed_file && git commit -m "commitmsg"' 99 | ) 100 | 101 | before = checksum_directory(f"{base_dir}/test") 102 | cmd = grm(["wt", "clean"], cwd=base_dir) 103 | assert cmd.returncode == 0 104 | assert "test" in os.listdir(base_dir) 105 | 106 | after = checksum_directory(f"{base_dir}/test") 107 | assert before == after 108 | 109 | 110 | def test_worktree_clean_refusal_tracking_branch_mismatch(): 111 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 112 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 113 | assert cmd.returncode == 0 114 | 115 | shell( 116 | f"cd {base_dir}/test && git push origin test && git reset --hard origin/test^" 117 | ) 118 | 119 | before = checksum_directory(f"{base_dir}/test") 120 | cmd = grm(["wt", "clean"], cwd=base_dir) 121 | assert cmd.returncode == 0 122 | assert "test" in os.listdir(base_dir) 123 | 124 | after = checksum_directory(f"{base_dir}/test") 125 | assert before == after 126 | 127 | 128 | def test_worktree_clean_fail_from_subdir(): 129 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 130 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 131 | assert cmd.returncode == 0 132 | 133 | cmd = grm(["wt", "clean"], cwd=f"{base_dir}/test") 134 | assert cmd.returncode != 0 135 | assert len(cmd.stdout) == 0 136 | assert len(cmd.stderr) != 0 137 | 138 | 139 | def test_worktree_clean_non_worktree(): 140 | with TempGitRepository() as git_dir: 141 | cmd = grm(["wt", "clean"], cwd=git_dir) 142 | assert cmd.returncode != 0 143 | assert 
len(cmd.stdout) == 0 144 | assert len(cmd.stderr) != 0 145 | 146 | 147 | def test_worktree_clean_non_git(): 148 | with NonGitDir() as base_dir: 149 | cmd = grm(["wt", "clean"], cwd=base_dir) 150 | assert cmd.returncode != 0 151 | assert len(cmd.stdout) == 0 152 | assert len(cmd.stderr) != 0 153 | 154 | 155 | @pytest.mark.parametrize("configure_default_branch", [True, False]) 156 | @pytest.mark.parametrize("branch_list_empty", [True, False]) 157 | def test_worktree_clean_configured_default_branch( 158 | configure_default_branch, branch_list_empty 159 | ): 160 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 161 | if configure_default_branch: 162 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 163 | if branch_list_empty: 164 | f.write( 165 | """ 166 | persistent_branches = [] 167 | """ 168 | ) 169 | else: 170 | f.write( 171 | """ 172 | persistent_branches = [ 173 | "mybranch" 174 | ] 175 | """ 176 | ) 177 | 178 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 179 | assert cmd.returncode == 0 180 | 181 | shell( 182 | f""" 183 | cd {base_dir} 184 | ( 185 | cd ./test 186 | touch change 187 | git add change 188 | git commit -m commit 189 | ) 190 | 191 | git --git-dir ./.git-main-working-tree worktree add mybranch 192 | ( 193 | cd ./mybranch 194 | git merge --no-ff test 195 | ) 196 | git --git-dir ./.git-main-working-tree worktree remove mybranch 197 | """ 198 | ) 199 | 200 | cmd = grm(["wt", "clean"], cwd=base_dir) 201 | assert cmd.returncode == 0 202 | if configure_default_branch and not branch_list_empty: 203 | assert "test" not in os.listdir(base_dir) 204 | else: 205 | assert "test" in os.listdir(base_dir) 206 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_config_presistent_branch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os.path 4 | 5 | import git 6 | from helpers import TempGitRepositoryWorktree, checksum_directory, funcname, grm, shell 7 | 8 | 9 | def test_worktree_never_clean_persistent_branches(): 10 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 11 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 12 | f.write( 13 | """ 14 | persistent_branches = [ 15 | "mybranch", 16 | ] 17 | """ 18 | ) 19 | 20 | cmd = grm(["wt", "add", "mybranch", "--track", "origin/master"], cwd=base_dir) 21 | assert cmd.returncode == 0 22 | 23 | before = checksum_directory(f"{base_dir}/mybranch") 24 | 25 | cmd = grm(["wt", "clean"], cwd=base_dir) 26 | assert cmd.returncode == 0 27 | 28 | assert "mybranch" in os.listdir(base_dir) 29 | repo = git.Repo(os.path.join(base_dir, "mybranch")) 30 | assert str(repo.active_branch) == "mybranch" 31 | 32 | after = checksum_directory(f"{base_dir}/mybranch") 33 | assert before == after 34 | 35 | 36 | def test_worktree_clean_branch_merged_into_persistent(): 37 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 38 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 39 | f.write( 40 | """ 41 | persistent_branches = [ 42 | "master", 43 | ] 44 | """ 45 | ) 46 | 47 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 48 | assert cmd.returncode == 0 49 | 50 | shell( 51 | f""" 52 | cd {base_dir}/test 53 | touch change1 54 | git add change1 55 | git commit -m "commit1" 56 | """ 57 | ) 58 | 59 | cmd = grm(["wt", "add", "master"], cwd=base_dir) 60 | assert cmd.returncode == 0 61 | 62 | shell( 63 | f""" 64 | cd {base_dir}/master 
65 | git merge --no-ff test 66 | """ 67 | ) 68 | 69 | cmd = grm(["wt", "clean"], cwd=base_dir) 70 | assert cmd.returncode == 0 71 | 72 | assert "test" not in os.listdir(base_dir) 73 | 74 | 75 | def test_worktree_no_clean_unmerged_branch(): 76 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 77 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 78 | f.write( 79 | """ 80 | persistent_branches = [ 81 | "master", 82 | ] 83 | """ 84 | ) 85 | 86 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 87 | assert cmd.returncode == 0 88 | 89 | shell( 90 | f""" 91 | cd {base_dir}/test 92 | touch change1 93 | git add change1 94 | git commit -m "commit1" 95 | git push origin test 96 | """ 97 | ) 98 | 99 | cmd = grm(["wt", "add", "master"], cwd=base_dir) 100 | assert cmd.returncode == 0 101 | 102 | cmd = grm(["wt", "clean"], cwd=base_dir) 103 | assert cmd.returncode == 0 104 | 105 | assert "test" in os.listdir(base_dir) 106 | 107 | 108 | def test_worktree_delete_branch_merged_into_persistent(): 109 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 110 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 111 | f.write( 112 | """ 113 | persistent_branches = [ 114 | "master", 115 | ] 116 | """ 117 | ) 118 | 119 | cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) 120 | assert cmd.returncode == 0 121 | 122 | shell( 123 | f""" 124 | cd {base_dir}/test 125 | touch change1 126 | git add change1 127 | git commit -m "commit1" 128 | """ 129 | ) 130 | 131 | cmd = grm(["wt", "add", "master"], cwd=base_dir) 132 | assert cmd.returncode == 0 133 | 134 | shell( 135 | f""" 136 | cd {base_dir}/master 137 | git merge --no-ff test 138 | """ 139 | ) 140 | 141 | cmd = grm(["wt", "delete", "test"], cwd=base_dir) 142 | assert cmd.returncode == 0 143 | 144 | assert "test" not in os.listdir(base_dir) 145 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_conversion.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | 5 | from helpers import ( 6 | EmptyDir, 7 | NonGitDir, 8 | TempGitRepository, 9 | TempGitRepositoryWorktree, 10 | checksum_directory, 11 | funcname, 12 | grm, 13 | ) 14 | 15 | 16 | def test_convert(): 17 | with TempGitRepository() as git_dir: 18 | cmd = grm(["wt", "convert"], cwd=git_dir) 19 | assert cmd.returncode == 0 20 | 21 | files = os.listdir(git_dir) 22 | assert len(files) == 1 23 | assert files[0] == ".git-main-working-tree" 24 | 25 | cmd = grm(["wt", "add", "test"], cwd=git_dir) 26 | assert cmd.returncode == 0 27 | 28 | files = os.listdir(git_dir) 29 | assert len(files) == 2 30 | assert set(files) == {".git-main-working-tree", "test"} 31 | 32 | 33 | def test_convert_already_worktree(): 34 | with TempGitRepositoryWorktree.get(funcname()) as (git_dir, _commit): 35 | before = checksum_directory(git_dir) 36 | 37 | cmd = grm(["wt", "convert"], cwd=git_dir) 38 | assert cmd.returncode != 0 39 | 40 | after = checksum_directory(git_dir) 41 | assert before == after 42 | 43 | 44 | def test_convert_non_git(): 45 | with NonGitDir() as dir: 46 | before = checksum_directory(dir) 47 | 48 | cmd = grm(["wt", "convert"], cwd=dir) 49 | assert cmd.returncode != 0 50 | 51 | after = checksum_directory(dir) 52 | assert before == after 53 | 54 | 55 | def test_convert_empty(): 56 | with EmptyDir() as dir: 57 | before = checksum_directory(dir) 58 | 59 | cmd = grm(["wt", "convert"], cwd=dir) 60 | 
assert cmd.returncode != 0 61 | 62 | after = checksum_directory(dir) 63 | assert before == after 64 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_fetch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import re 4 | 5 | import git 6 | import pytest 7 | from helpers import ( 8 | EmptyDir, 9 | TempGitFileRemote, 10 | TempGitRepositoryWorktree, 11 | funcname, 12 | grm, 13 | shell, 14 | ) 15 | 16 | 17 | def test_worktree_fetch(): 18 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit): 19 | with TempGitFileRemote() as (remote_path, _remote_sha): 20 | shell( 21 | f""" 22 | cd {base_dir} 23 | git --git-dir .git-main-working-tree remote add upstream file://{remote_path} 24 | git --git-dir .git-main-working-tree push --force upstream master:master 25 | """ 26 | ) 27 | 28 | cmd = grm(["wt", "fetch"], cwd=base_dir) 29 | assert cmd.returncode == 0 30 | 31 | repo = git.Repo(f"{base_dir}/.git-main-working-tree") 32 | assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha 33 | assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha 34 | 35 | with EmptyDir() as tmp: 36 | shell( 37 | f""" 38 | cd {tmp} 39 | git clone {remote_path} tmp 40 | cd tmp 41 | echo change > mychange-remote 42 | git add mychange-remote 43 | git commit -m "change-remote" 44 | git push origin HEAD:master 45 | """ 46 | ) 47 | remote_commit = git.Repo(f"{tmp}/tmp").commit("master").hexsha 48 | 49 | assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha 50 | assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha 51 | 52 | cmd = grm(["wt", "fetch"], cwd=base_dir) 53 | assert cmd.returncode == 0 54 | 55 | assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha 56 | assert repo.commit("master").hexsha == root_commit 57 | assert repo.commit("upstream/master").hexsha == remote_commit 58 | 59 | 60 | @pytest.mark.parametrize("rebase", [True, False]) 61 | @pytest.mark.parametrize("ffable", [True, False]) 62 | @pytest.mark.parametrize("has_changes", [True, False]) 63 | @pytest.mark.parametrize("stash", [True, False]) 64 | def test_worktree_pull(rebase, ffable, has_changes, stash): 65 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit): 66 | with TempGitFileRemote() as (remote_path, _remote_sha): 67 | shell( 68 | f""" 69 | cd {base_dir} 70 | git --git-dir .git-main-working-tree remote add upstream file://{remote_path} 71 | git --git-dir .git-main-working-tree push --force upstream master:master 72 | """ 73 | ) 74 | 75 | repo = git.Repo(f"{base_dir}/.git-main-working-tree") 76 | assert repo.commit("origin/master").hexsha == repo.commit("master").hexsha 77 | assert repo.commit("upstream/master").hexsha == repo.commit("master").hexsha 78 | 79 | with EmptyDir() as tmp: 80 | shell( 81 | f""" 82 | cd {tmp} 83 | git clone {remote_path} tmp 84 | cd tmp 85 | git checkout origin/master 86 | echo change > mychange-remote 87 | git add mychange-remote 88 | git commit -m "change-remote" 89 | git push origin HEAD:master 90 | """ 91 | ) 92 | remote_commit = git.Repo(f"{tmp}/tmp").commit("HEAD").hexsha 93 | 94 | grm(["wt", "add", "master", "--track", "upstream/master"], cwd=base_dir) 95 | 96 | repo = git.Repo(f"{base_dir}/master") 97 | if not ffable: 98 | shell( 99 | f""" 100 | cd {base_dir}/master 101 | echo change > mychange 102 | git add mychange 103 | git commit -m 
"local-commit-in-master" 104 | """ 105 | ) 106 | 107 | if has_changes: 108 | shell( 109 | f""" 110 | cd {base_dir}/master 111 | echo change >> root-commit-in-worktree-1 112 | echo uncommitedchange > uncommitedchange 113 | """ 114 | ) 115 | 116 | args = ["wt", "pull"] 117 | if rebase: 118 | args += ["--rebase"] 119 | if stash: 120 | args += ["--stash"] 121 | cmd = grm(args, cwd=base_dir) 122 | if has_changes and not stash: 123 | assert cmd.returncode != 0 124 | assert re.match(r".*master.*contains changes.*", cmd.stderr) 125 | else: 126 | assert repo.commit("upstream/master").hexsha == remote_commit 127 | assert repo.commit("origin/master").hexsha == root_commit 128 | assert ( 129 | repo.commit("master").hexsha 130 | != repo.commit("origin/master").hexsha 131 | ) 132 | if has_changes: 133 | assert ["uncommitedchange"] == repo.untracked_files 134 | assert repo.is_dirty() 135 | else: 136 | assert not repo.is_dirty() 137 | 138 | if not rebase: 139 | if ffable: 140 | assert cmd.returncode == 0 141 | assert ( 142 | repo.commit("master").hexsha 143 | != repo.commit("origin/master").hexsha 144 | ) 145 | assert ( 146 | repo.commit("master").hexsha 147 | == repo.commit("upstream/master").hexsha 148 | ) 149 | assert ( 150 | repo.commit("upstream/master").hexsha == remote_commit 151 | ) 152 | else: 153 | assert cmd.returncode != 0 154 | assert "cannot be fast forwarded" in cmd.stderr 155 | assert ( 156 | repo.commit("master").hexsha 157 | != repo.commit("origin/master").hexsha 158 | ) 159 | assert repo.commit("master").hexsha != remote_commit 160 | assert ( 161 | repo.commit("upstream/master").hexsha == remote_commit 162 | ) 163 | else: 164 | assert cmd.returncode == 0 165 | if ffable: 166 | assert ( 167 | repo.commit("master").hexsha 168 | != repo.commit("origin/master").hexsha 169 | ) 170 | assert ( 171 | repo.commit("master").hexsha 172 | == repo.commit("upstream/master").hexsha 173 | ) 174 | assert ( 175 | repo.commit("upstream/master").hexsha == remote_commit 176 | ) 177 | else: 178 | assert ( 179 | repo.commit("master").message.strip() 180 | == "local-commit-in-master" 181 | ) 182 | assert repo.commit("master~1").hexsha == remote_commit 183 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_rebase.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import re 5 | 6 | import git 7 | import pytest 8 | from helpers import TempGitRepositoryWorktree, funcname, grm, shell 9 | 10 | 11 | @pytest.mark.parametrize("pull", [True, False]) 12 | @pytest.mark.parametrize("rebase", [True, False]) 13 | @pytest.mark.parametrize("ffable", [True, False]) 14 | @pytest.mark.parametrize("has_changes", [True, False]) 15 | @pytest.mark.parametrize("stash", [True, False]) 16 | def test_worktree_rebase(pull, rebase, ffable, has_changes, stash): 17 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _root_commit): 18 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 19 | f.write('persistent_branches = ["mybasebranch"]') 20 | 21 | repo = git.Repo(f"{base_dir}/.git-main-working-tree") 22 | 23 | grm( 24 | ["wt", "add", "mybasebranch", "--track", "origin/mybasebranch"], 25 | cwd=base_dir, 26 | ) 27 | 28 | shell( 29 | f""" 30 | cd {base_dir}/mybasebranch 31 | echo change > mychange-root 32 | git add mychange-root 33 | git commit -m "commit-root" 34 | echo change > mychange-base-local 35 | git add mychange-base-local 36 | git commit -m "commit-in-base-local" 37 | git push 
origin mybasebranch 38 | """ 39 | ) 40 | 41 | grm( 42 | ["wt", "add", "myfeatbranch", "--track", "origin/myfeatbranch"], 43 | cwd=base_dir, 44 | ) 45 | shell( 46 | f""" 47 | cd {base_dir}/myfeatbranch 48 | git reset --hard mybasebranch^ # root 49 | echo change > mychange-feat-local 50 | git add mychange-feat-local 51 | git commit -m "commit-in-feat-local" 52 | git push origin HEAD:myfeatbranch 53 | """ 54 | ) 55 | 56 | grm(["wt", "add", "tmp"], cwd=base_dir) 57 | shell( 58 | f""" 59 | cd {base_dir}/tmp 60 | git reset --hard mybasebranch 61 | echo change > mychange-base-remote 62 | git add mychange-base-remote 63 | git commit -m "commit-in-base-remote" 64 | git push origin HEAD:mybasebranch 65 | 66 | git reset --hard myfeatbranch 67 | echo change > mychange-feat-remote 68 | git add mychange-feat-remote 69 | git commit -m "commit-in-feat-remote" 70 | git push origin HEAD:myfeatbranch 71 | """ 72 | ) 73 | 74 | if not ffable: 75 | shell( 76 | f""" 77 | cd {base_dir}/mybasebranch 78 | echo change > mychange-base-no-ff 79 | git add mychange-base-no-ff 80 | git commit -m "commit-in-base-local-no-ff" 81 | 82 | cd {base_dir}/myfeatbranch 83 | echo change > mychange-feat-no-ff 84 | git add mychange-feat-no-ff 85 | git commit -m "commit-in-feat-local-no-ff" 86 | """ 87 | ) 88 | 89 | if has_changes: 90 | shell( 91 | f""" 92 | cd {base_dir}/myfeatbranch 93 | echo uncommitedchange > uncommitedchange 94 | """ 95 | ) 96 | 97 | grm(["wt", "delete", "--force", "tmp"], cwd=base_dir) 98 | 99 | repo = git.Repo(f"{base_dir}/.git-main-working-tree") 100 | if ffable: 101 | assert repo.commit("mybasebranch~1").message.strip() == "commit-root" 102 | assert ( 103 | repo.refs.mybasebranch.commit.message.strip() == "commit-in-base-local" 104 | ) 105 | assert ( 106 | repo.remote("origin").refs.mybasebranch.commit.message.strip() 107 | == "commit-in-base-remote" 108 | ) 109 | assert ( 110 | repo.refs.myfeatbranch.commit.message.strip() == "commit-in-feat-local" 111 | ) 112 | assert ( 113 | repo.remote("origin").refs.myfeatbranch.commit.message.strip() 114 | == "commit-in-feat-remote" 115 | ) 116 | else: 117 | assert ( 118 | repo.commit("mybasebranch").message.strip() 119 | == "commit-in-base-local-no-ff" 120 | ) 121 | assert ( 122 | repo.commit("mybasebranch~1").message.strip() == "commit-in-base-local" 123 | ) 124 | assert repo.commit("mybasebranch~2").message.strip() == "commit-root" 125 | assert ( 126 | repo.commit("myfeatbranch").message.strip() 127 | == "commit-in-feat-local-no-ff" 128 | ) 129 | assert ( 130 | repo.commit("myfeatbranch~1").message.strip() == "commit-in-feat-local" 131 | ) 132 | assert repo.commit("myfeatbranch~2").message.strip() == "commit-root" 133 | assert ( 134 | repo.remote("origin").refs.mybasebranch.commit.message.strip() 135 | == "commit-in-base-remote" 136 | ) 137 | assert ( 138 | repo.remote("origin").refs.myfeatbranch.commit.message.strip() 139 | == "commit-in-feat-remote" 140 | ) 141 | 142 | args = ["wt", "rebase"] 143 | if pull: 144 | args += ["--pull"] 145 | if rebase: 146 | args += ["--rebase"] 147 | if stash: 148 | args += ["--stash"] 149 | cmd = grm(args, cwd=base_dir) 150 | 151 | if rebase and not pull: 152 | assert cmd.returncode != 0 153 | assert len(cmd.stderr) != 0 154 | elif has_changes and not stash: 155 | assert cmd.returncode != 0 156 | assert re.search(r".*myfeatbranch.*contains changes.*", cmd.stderr) 157 | else: 158 | repo = git.Repo(f"{base_dir}/myfeatbranch") 159 | if has_changes: 160 | assert ["uncommitedchange"] == repo.untracked_files 161 | if pull: 162 | if 
rebase: 163 | assert cmd.returncode == 0 164 | if ffable: 165 | assert ( 166 | repo.commit("HEAD").message.strip() 167 | == "commit-in-feat-remote" 168 | ) 169 | assert ( 170 | repo.commit("HEAD~1").message.strip() 171 | == "commit-in-feat-local" 172 | ) 173 | assert ( 174 | repo.commit("HEAD~2").message.strip() 175 | == "commit-in-base-remote" 176 | ) 177 | assert ( 178 | repo.commit("HEAD~3").message.strip() 179 | == "commit-in-base-local" 180 | ) 181 | assert repo.commit("HEAD~4").message.strip() == "commit-root" 182 | else: 183 | assert ( 184 | repo.commit("HEAD").message.strip() 185 | == "commit-in-feat-local-no-ff" 186 | ) 187 | assert ( 188 | repo.commit("HEAD~1").message.strip() 189 | == "commit-in-feat-remote" 190 | ) 191 | assert ( 192 | repo.commit("HEAD~2").message.strip() 193 | == "commit-in-feat-local" 194 | ) 195 | assert ( 196 | repo.commit("HEAD~3").message.strip() 197 | == "commit-in-base-local-no-ff" 198 | ) 199 | assert ( 200 | repo.commit("HEAD~4").message.strip() 201 | == "commit-in-base-remote" 202 | ) 203 | assert ( 204 | repo.commit("HEAD~5").message.strip() 205 | == "commit-in-base-local" 206 | ) 207 | assert repo.commit("HEAD~6").message.strip() == "commit-root" 208 | else: 209 | if ffable: 210 | assert cmd.returncode == 0 211 | assert ( 212 | repo.commit("HEAD").message.strip() 213 | == "commit-in-feat-remote" 214 | ) 215 | assert ( 216 | repo.commit("HEAD~1").message.strip() 217 | == "commit-in-feat-local" 218 | ) 219 | assert ( 220 | repo.commit("HEAD~2").message.strip() 221 | == "commit-in-base-remote" 222 | ) 223 | assert ( 224 | repo.commit("HEAD~3").message.strip() 225 | == "commit-in-base-local" 226 | ) 227 | assert repo.commit("HEAD~4").message.strip() == "commit-root" 228 | else: 229 | assert cmd.returncode != 0 230 | assert ( 231 | repo.commit("HEAD").message.strip() 232 | == "commit-in-feat-local-no-ff" 233 | ) 234 | assert ( 235 | repo.commit("HEAD~1").message.strip() 236 | == "commit-in-feat-local" 237 | ) 238 | assert ( 239 | repo.commit("HEAD~2").message.strip() 240 | == "commit-in-base-local-no-ff" 241 | ) 242 | assert ( 243 | repo.commit("HEAD~3").message.strip() 244 | == "commit-in-base-local" 245 | ) 246 | assert repo.commit("HEAD~4").message.strip() == "commit-root" 247 | else: 248 | assert cmd.returncode == 0 249 | if ffable: 250 | assert repo.commit("HEAD").message.strip() == "commit-in-feat-local" 251 | assert ( 252 | repo.commit("HEAD~1").message.strip() == "commit-in-base-local" 253 | ) 254 | assert repo.commit("HEAD~2").message.strip() == "commit-root" 255 | else: 256 | assert ( 257 | repo.commit("HEAD").message.strip() 258 | == "commit-in-feat-local-no-ff" 259 | ) 260 | assert ( 261 | repo.commit("HEAD~1").message.strip() == "commit-in-feat-local" 262 | ) 263 | assert ( 264 | repo.commit("HEAD~2").message.strip() 265 | == "commit-in-base-local-no-ff" 266 | ) 267 | assert ( 268 | repo.commit("HEAD~3").message.strip() == "commit-in-base-local" 269 | ) 270 | assert repo.commit("HEAD~4").message.strip() == "commit-root" 271 | -------------------------------------------------------------------------------- /e2e_tests/test_worktree_status.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import re 5 | 6 | import pytest 7 | from helpers import ( 8 | NonGitDir, 9 | TempGitRepository, 10 | TempGitRepositoryWorktree, 11 | funcname, 12 | grm, 13 | shell, 14 | ) 15 | 16 | 17 | @pytest.mark.parametrize("has_config", [True, False]) 18 | def 
test_worktree_status(has_config): 19 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 20 | if has_config: 21 | with open(os.path.join(base_dir, "grm.toml"), "w") as f: 22 | f.write("") 23 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 24 | assert cmd.returncode == 0 25 | 26 | cmd = grm(["wt", "status"], cwd=base_dir) 27 | assert cmd.returncode == 0 28 | assert len(cmd.stderr) == 0 29 | stdout = cmd.stdout.lower() 30 | assert "test" in stdout 31 | 32 | 33 | def test_worktree_status_fail_from_subdir(): 34 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 35 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 36 | assert cmd.returncode == 0 37 | 38 | cmd = grm(["wt", "status"], cwd=f"{base_dir}/test") 39 | assert cmd.returncode != 0 40 | assert len(cmd.stdout) == 0 41 | assert len(cmd.stderr) != 0 42 | 43 | 44 | def test_worktree_status_non_worktree(): 45 | with TempGitRepository() as git_dir: 46 | cmd = grm(["wt", "status"], cwd=git_dir) 47 | assert cmd.returncode != 0 48 | assert len(cmd.stdout) == 0 49 | assert len(cmd.stderr) != 0 50 | 51 | 52 | def test_worktree_status_non_git(): 53 | with NonGitDir() as base_dir: 54 | cmd = grm(["wt", "status"], cwd=base_dir) 55 | assert cmd.returncode != 0 56 | assert len(cmd.stdout) == 0 57 | assert len(cmd.stderr) != 0 58 | 59 | 60 | def test_worktree_status_warn_with_non_worktree_dir(): 61 | with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): 62 | cmd = grm(["wt", "add", "test"], cwd=base_dir) 63 | assert cmd.returncode == 0 64 | 65 | shell( 66 | f""" 67 | cd {base_dir} 68 | mkdir not_a_worktree 69 | """ 70 | ) 71 | 72 | cmd = grm(["wt", "status"], cwd=base_dir) 73 | 74 | assert cmd.returncode == 0 75 | assert len(cmd.stdout) != 0 76 | assert len(cmd.stderr) != 0 77 | assert ( 78 | re.match( 79 | ".*error.*not_a_worktree.*not a valid worktree directory", 80 | cmd.stderr, 81 | re.IGNORECASE, 82 | ) 83 | is not None 84 | ) 85 | -------------------------------------------------------------------------------- /example.config.toml: -------------------------------------------------------------------------------- 1 | [[trees]] 2 | root = "~/example-projects/" 3 | 4 | [[trees.repos]] 5 | name = "git-repo-manager" 6 | 7 | [[trees.repos.remotes]] 8 | name = "origin" 9 | url = "https://code.hkoerber.de/hannes/git-repo-manager.git" 10 | type = "https" 11 | 12 | [[trees.repos.remotes]] 13 | name = "github" 14 | url = "https://github.com/hakoerber/git-repo-manager.git" 15 | type = "https" 16 | 17 | 18 | [[trees.repos]] 19 | name = "dotfiles" 20 | 21 | [[trees.repos.remotes]] 22 | name = "origin" 23 | url = "https://github.com/hakoerber/dotfiles.git" 24 | type = "https" 25 | -------------------------------------------------------------------------------- /example.config.yaml: -------------------------------------------------------------------------------- 1 | trees: 2 | - root: "~/example-projects/" 3 | repos: 4 | - name: "git-repo-manager" 5 | remotes: 6 | - name: "origin" 7 | url: "https://code.hkoerber.de/hannes/git-repo-manager.git" 8 | type: "https" 9 | - name: "github" 10 | url: "https://github.com/hakoerber/git-repo-manager.git" 11 | type: "https" 12 | - name: "dotfiles" 13 | remotes: 14 | - name: "origin" 15 | url: "https://github.com/hakoerber/dotfiles.git" 16 | type: "https" 17 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "crane": { 4 | 
"inputs": { 5 | "nixpkgs": [ 6 | "nixpkgs" 7 | ] 8 | }, 9 | "locked": { 10 | "lastModified": 1724006180, 11 | "narHash": "sha256-PVxPj0Ga2fMYMtcT9ARCthF+4U71YkOT7ZjgD/vf1Aw=", 12 | "owner": "ipetkov", 13 | "repo": "crane", 14 | "rev": "7ce92819802bc583b7e82ebc08013a530f22209f", 15 | "type": "github" 16 | }, 17 | "original": { 18 | "owner": "ipetkov", 19 | "repo": "crane", 20 | "type": "github" 21 | } 22 | }, 23 | "flake-utils": { 24 | "inputs": { 25 | "systems": "systems" 26 | }, 27 | "locked": { 28 | "lastModified": 1710146030, 29 | "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", 30 | "owner": "numtide", 31 | "repo": "flake-utils", 32 | "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", 33 | "type": "github" 34 | }, 35 | "original": { 36 | "owner": "numtide", 37 | "repo": "flake-utils", 38 | "type": "github" 39 | } 40 | }, 41 | "nixpkgs": { 42 | "locked": { 43 | "lastModified": 1738435198, 44 | "narHash": "sha256-5+Hmo4nbqw8FrW85FlNm4IIrRnZ7bn0cmXlScNsNRLo=", 45 | "owner": "NixOS", 46 | "repo": "nixpkgs", 47 | "rev": "f6687779bf4c396250831aa5a32cbfeb85bb07a3", 48 | "type": "github" 49 | }, 50 | "original": { 51 | "owner": "NixOS", 52 | "ref": "nixos-24.11", 53 | "repo": "nixpkgs", 54 | "type": "github" 55 | } 56 | }, 57 | "root": { 58 | "inputs": { 59 | "crane": "crane", 60 | "flake-utils": "flake-utils", 61 | "nixpkgs": "nixpkgs", 62 | "rust-overlay": "rust-overlay" 63 | } 64 | }, 65 | "rust-overlay": { 66 | "inputs": { 67 | "nixpkgs": [ 68 | "nixpkgs" 69 | ] 70 | }, 71 | "locked": { 72 | "lastModified": 1724293269, 73 | "narHash": "sha256-x/XhOAszT/ejditCHUtGOjQcVg2AQhrC/QVew3i7kTI=", 74 | "owner": "oxalica", 75 | "repo": "rust-overlay", 76 | "rev": "6dc6d34a3a217457d7044dcce32b6d537480a6a1", 77 | "type": "github" 78 | }, 79 | "original": { 80 | "owner": "oxalica", 81 | "repo": "rust-overlay", 82 | "type": "github" 83 | } 84 | }, 85 | "systems": { 86 | "locked": { 87 | "lastModified": 1681028828, 88 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 89 | "owner": "nix-systems", 90 | "repo": "default", 91 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 92 | "type": "github" 93 | }, 94 | "original": { 95 | "owner": "nix-systems", 96 | "repo": "default", 97 | "type": "github" 98 | } 99 | } 100 | }, 101 | "root": "root", 102 | "version": 7 103 | } 104 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "git-repo-manager"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11"; 6 | flake-utils.url = "github:numtide/flake-utils"; 7 | 8 | crane = { 9 | url = "github:ipetkov/crane"; 10 | inputs.nixpkgs.follows = "nixpkgs"; 11 | }; 12 | rust-overlay = { 13 | url = "github:oxalica/rust-overlay"; 14 | inputs.nixpkgs.follows = "nixpkgs"; 15 | }; 16 | }; 17 | 18 | outputs = { 19 | self, 20 | nixpkgs, 21 | flake-utils, 22 | crane, 23 | rust-overlay, 24 | }: 25 | { 26 | overlays = { 27 | git-repo-manager = final: prev: { 28 | git-repo-manager = self.packages.${prev.stdenv.system}.default; 29 | }; 30 | }; 31 | } 32 | // flake-utils.lib.eachDefaultSystem ( 33 | system: let 34 | pkgs = 35 | import nixpkgs 36 | { 37 | inherit system; 38 | overlays = [ 39 | rust-overlay.overlays.default 40 | ]; 41 | }; 42 | 43 | rustToolchain = pkgs.rust-bin.stable.latest.default; 44 | craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; 45 | 46 | environment = with pkgs; { 47 | pname = "grm"; # otherwise `nix 
run` looks for git-repo-manager 48 | src = craneLib.cleanCargoSource (craneLib.path ./.); 49 | buildInputs = 50 | [ 51 | # tools 52 | pkg-config 53 | rustToolchain 54 | # deps 55 | git 56 | openssl 57 | openssl.dev 58 | zlib 59 | zlib.dev 60 | ] 61 | ++ lib.optional stdenv.isDarwin (with darwin.apple_sdk.frameworks; [ 62 | CoreFoundation 63 | CoreServices 64 | Security 65 | SystemConfiguration 66 | ]); 67 | }; 68 | in { 69 | apps = { 70 | default = self.apps.${system}.git-repo-manager; 71 | 72 | git-repo-manager = flake-utils.lib.mkApp { 73 | drv = self.packages.${system}.git-repo-manager; 74 | }; 75 | }; 76 | 77 | checks = { 78 | pkg = self.packages.${system}.default; 79 | shl = self.devShells.${system}.default; 80 | }; 81 | 82 | devShells = { 83 | default = pkgs.mkShell (environment 84 | // { 85 | buildInputs = 86 | environment.buildInputs 87 | ++ (with pkgs; [ 88 | alejandra # nix formatting 89 | black 90 | isort 91 | just 92 | mdbook 93 | python3 94 | ruff 95 | shellcheck 96 | shfmt 97 | ]); 98 | }); 99 | }; 100 | 101 | packages = { 102 | default = self.packages.${system}.git-repo-manager; 103 | 104 | git-repo-manager = craneLib.buildPackage (environment 105 | // { 106 | cargoArtifacts = craneLib.buildDepsOnly environment; 107 | }); 108 | }; 109 | } 110 | ); 111 | } 112 | -------------------------------------------------------------------------------- /pkg/arch/.SRCINFO: -------------------------------------------------------------------------------- 1 | pkgbase = grm-git 2 | pkgdesc = Manage git repos, worktrees and integrate with GitHub and GitLab 3 | pkgver = 0.7.21.r1.gfcd315b 4 | pkgrel = 1 5 | url = https://github.com/hakoerber/git-repo-manager 6 | arch = x86_64 7 | license = GPL-3.0-only 8 | makedepends = cargo 9 | makedepends = git 10 | depends = glibc 11 | depends = gcc-libs 12 | depends = libgit2 13 | depends = curl 14 | depends = openssl 15 | provides = grm 16 | conflicts = grm 17 | options = !lto 18 | options = !debug 19 | source = grm-git::git+https://github.com/hakoerber/git-repo-manager#branch=develop 20 | sha256sums = SKIP 21 | 22 | pkgname = grm-git 23 | -------------------------------------------------------------------------------- /pkg/arch/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !/.gitignore 3 | !/PKGBUILD 4 | !/.SRCINFO 5 | -------------------------------------------------------------------------------- /pkg/arch/PKGBUILD: -------------------------------------------------------------------------------- 1 | # Maintainer: Hannes Körber 2 | pkgname='grm-git' 3 | pkgver=0.7.21.r1.gfcd315b 4 | pkgrel=1 5 | pkgdesc='Manage git repos, worktrees and integrate with GitHub and GitLab' 6 | arch=('x86_64') 7 | url='https://github.com/hakoerber/git-repo-manager' 8 | license=('GPL-3.0-only') 9 | depends=('glibc' 'gcc-libs' 'libgit2' 'curl' 'openssl') 10 | makedepends=('cargo' 'git') 11 | provides=('grm') 12 | conflicts=('grm') 13 | source=("${pkgname}::git+https://github.com/hakoerber/git-repo-manager#branch=develop") 14 | sha256sums=('SKIP') 15 | # https://gitlab.archlinux.org/archlinux/packaging/packages/pacman/-/issues/20 16 | options=(!lto !debug) 17 | 18 | pkgver() { 19 | cd "${pkgname}" 20 | git describe --long --tags --abbrev=7 | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g' 21 | } 22 | 23 | prepare() { 24 | cd "${pkgname}" 25 | export RUSTUP_TOOLCHAIN=stable 26 | cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')" 27 | } 28 | 29 | build() { 30 | cd "${pkgname}" 31 | export RUSTUP_TOOLCHAIN=stable 
32 | export CARGO_TARGET_DIR=target 33 | export GRM_RELEASE_VERSION="${pkgver}" 34 | cargo build --frozen --release 35 | } 36 | 37 | check() { 38 | cd "${pkgname}" 39 | export RUSTUP_TOOLCHAIN=stable 40 | cargo test --frozen 41 | } 42 | 43 | package() { 44 | cd "${pkgname}" 45 | install -Dm0755 -t "$pkgdir/usr/bin/" "target/release/${pkgname/-git}" 46 | } 47 | -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | 7 | usage() { 8 | printf '%s\n' "usage: $0 (major|minor|patch)" >&2 9 | } 10 | 11 | if (($# != 1)); then 12 | usage 13 | exit 1 14 | fi 15 | 16 | current_version="$(grep '^version \?=' Cargo.toml | head -1 | cut -d '=' -f 2 | tr -d " '"'"')" 17 | 18 | major="$(printf '%s' "${current_version}" | grep -oP '^\d+')" 19 | minor="$(printf '%s' "${current_version}" | grep -oP '\.\d+\.' | tr -d '.')" 20 | patch="$(printf '%s' "${current_version}" | grep -oP '\d+$' | tr -d '.')" 21 | 22 | case "$1" in 23 | major) 24 | ((major++)) || true 25 | minor=0 26 | patch=0 27 | 28 | printf '%s\n' "Are you sure you want to release 1.x?" >&2 29 | exit 1 30 | ;; 31 | minor) 32 | ((minor++)) || true 33 | patch=0 34 | ;; 35 | patch) 36 | ((patch++)) || true 37 | ;; 38 | *) 39 | usage 40 | exit 1 41 | ;; 42 | esac 43 | 44 | new_version="${major}.${minor}.${patch}" 45 | 46 | if ! [[ "${new_version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then 47 | printf '%s\n' 'Version has to a complete semver' >&2 48 | exit 1 49 | fi 50 | 51 | current_branch="$(git rev-parse --abbrev-ref HEAD)" 52 | if [[ "${current_branch}" != "develop" ]]; then 53 | printf '%s\n' 'You need to be on develop' >&2 54 | exit 1 55 | fi 56 | 57 | gitstatus="$(git status --porcelain)" 58 | if [[ -n "${gitstatus}" ]]; then 59 | printf '%s\n' 'There are uncommitted changes' >&2 60 | exit 1 61 | fi 62 | 63 | if git tag --list "v${new_version}" | grep -q .; then 64 | printf 'Tag %s already exists\n' "v${new_version}" >&2 65 | exit 1 66 | fi 67 | 68 | for remote in $(git remote); do 69 | if git ls-remote --tags "${remote}" | grep -q "refs/tags/v${new_version}$"; then 70 | printf 'Tag %s already exists on %s\n' "v${new_version}" "${remote}" >&2 71 | exit 1 72 | fi 73 | done 74 | 75 | git fetch --all 76 | 77 | for remote in $(git remote); do 78 | for branch in master develop; do 79 | if ! git diff --quiet "${remote}/${branch}..${branch}"; then 80 | printf 'Remote branch %s/%s not up to date, synchronize first!\n' "${remote}" "${branch}" >&2 81 | exit 1 82 | fi 83 | done 84 | done 85 | 86 | if ! git merge-base --is-ancestor master develop; then 87 | printf '%s\n' 'Develop is not a straight descendant of master, rebase!' >&2 88 | exit 1 89 | fi 90 | 91 | changes="$(git log --oneline master..develop | wc -l)" 92 | if ((changes == 0)); then 93 | printf '%s\n' 'No changes between master and develop?' >&2 94 | exit 1 95 | fi 96 | 97 | sed -i "0,/^version/{s/^version.*$/version = \"${new_version}\"/}" Cargo.toml 98 | 99 | cargo update --package git-repo-manager --precise "${new_version}" 100 | 101 | diff="$(git diff --numstat)" 102 | if (($(printf '%s\n' "${diff}" | wc -l || true) != 2)); then 103 | printf '%s\n' 'Weird changes detected, bailing' >&2 104 | exit 1 105 | fi 106 | 107 | if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.lock$'; then 108 | printf '%s\n' 'Weird changes detected, bailing' >&2 109 | exit 1 110 | fi 111 | 112 | if ! 
printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.toml$'; then 113 | printf '%s\n' 'Weird changes detected, bailing' >&2 114 | exit 1 115 | fi 116 | 117 | git add Cargo.lock Cargo.toml 118 | 119 | git commit -m "Release v${new_version}" 120 | 121 | git switch master 2>/dev/null || { [[ -d "../master" ]] && cd "../master"; } || { 122 | printf '%s\n' 'Could not change to master' >&2 123 | exit 1 124 | } 125 | 126 | current_branch="$(git rev-parse --abbrev-ref HEAD)" 127 | if [[ "${current_branch}" != "master" ]]; then 128 | printf '%s\n' 'Looks like branch switching to master did not work' >&2 129 | exit 1 130 | fi 131 | 132 | git merge --no-ff --no-edit develop 133 | git tag "v${new_version}" 134 | 135 | for remote in $(git remote); do 136 | while ! git push "${remote}" "v${new_version}" master; do 137 | : 138 | done 139 | done 140 | 141 | git switch develop 2>/dev/null || { [[ -d "../develop" ]] && cd "../develop"; } || { 142 | printf '%s\n' 'Could not change to develop' >&2 143 | exit 1 144 | } 145 | 146 | current_branch="$(git rev-parse --abbrev-ref HEAD)" 147 | if [[ "${current_branch}" != "develop" ]]; then 148 | printf '%s\n' 'Looks like branch switching to develop did not work' >&2 149 | exit 1 150 | fi 151 | 152 | git merge --ff-only master 153 | 154 | for remote in $(git remote); do 155 | while ! git push "${remote}" develop; do 156 | : 157 | done 158 | done 159 | 160 | cargo publish 161 | 162 | printf 'Published %s successfully\n' "${new_version}" 163 | exit 0 164 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "stable" 3 | targets = ["x86_64-unknown-linux-musl"] 4 | -------------------------------------------------------------------------------- /src/auth.rs: -------------------------------------------------------------------------------- 1 | use std::process; 2 | 3 | #[derive(Clone)] 4 | pub struct AuthToken(String); 5 | 6 | impl AuthToken { 7 | pub fn access(&self) -> &str { 8 | &self.0 9 | } 10 | } 11 | 12 | pub fn get_token_from_command(command: &str) -> Result { 13 | let output = process::Command::new("/usr/bin/env") 14 | .arg("sh") 15 | .arg("-c") 16 | .arg(command) 17 | .output() 18 | .map_err(|error| format!("Failed to run token-command: {error}"))?; 19 | 20 | let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?; 21 | let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?; 22 | 23 | if !output.status.success() { 24 | return if !stderr.is_empty() { 25 | Err(format!("Token command failed: {stderr}")) 26 | } else { 27 | Err(String::from("Token command failed.")) 28 | }; 29 | } 30 | 31 | if !stderr.is_empty() { 32 | return Err(format!("Token command produced stderr: {stderr}")); 33 | } 34 | 35 | if stdout.is_empty() { 36 | return Err(String::from("Token command did not produce output")); 37 | } 38 | 39 | let token = stdout 40 | .split('\n') 41 | .next() 42 | .ok_or_else(|| String::from("Output did not contain any newline"))?; 43 | 44 | Ok(AuthToken(token.to_string())) 45 | } 46 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::process; 3 | 4 | use std::path::Path; 5 | 6 | use super::auth; 7 | use super::output::*; 8 | use super::path; 9 | use super::provider; 10 | use 
super::provider::Filter; 11 | use super::provider::Provider; 12 | use super::repo; 13 | use super::tree; 14 | 15 | pub type RemoteProvider = provider::RemoteProvider; 16 | pub type RemoteType = repo::RemoteType; 17 | 18 | fn worktree_setup_default() -> bool { 19 | false 20 | } 21 | 22 | #[derive(Debug, Serialize, Deserialize)] 23 | #[serde(untagged)] 24 | pub enum Config { 25 | ConfigTrees(ConfigTrees), 26 | ConfigProvider(ConfigProvider), 27 | } 28 | 29 | #[derive(Debug, Serialize, Deserialize)] 30 | #[serde(deny_unknown_fields)] 31 | pub struct ConfigTrees { 32 | pub trees: Vec, 33 | } 34 | 35 | #[derive(Debug, Serialize, Deserialize)] 36 | #[serde(deny_unknown_fields)] 37 | pub struct ConfigProviderFilter { 38 | pub access: Option, 39 | pub owner: Option, 40 | pub users: Option>, 41 | pub groups: Option>, 42 | } 43 | 44 | #[derive(Debug, Serialize, Deserialize)] 45 | #[serde(deny_unknown_fields)] 46 | pub struct ConfigProvider { 47 | pub provider: RemoteProvider, 48 | pub token_command: String, 49 | pub root: String, 50 | pub filters: Option, 51 | 52 | pub force_ssh: Option, 53 | 54 | pub api_url: Option, 55 | 56 | pub worktree: Option, 57 | 58 | pub remote_name: Option, 59 | } 60 | 61 | #[derive(Debug, Serialize, Deserialize)] 62 | #[serde(deny_unknown_fields)] 63 | pub struct RemoteConfig { 64 | pub name: String, 65 | pub url: String, 66 | #[serde(rename = "type")] 67 | pub remote_type: RemoteType, 68 | } 69 | 70 | impl RemoteConfig { 71 | pub fn from_remote(remote: repo::Remote) -> Self { 72 | Self { 73 | name: remote.name, 74 | url: remote.url, 75 | remote_type: remote.remote_type, 76 | } 77 | } 78 | 79 | pub fn into_remote(self) -> repo::Remote { 80 | repo::Remote { 81 | name: self.name, 82 | url: self.url, 83 | remote_type: self.remote_type, 84 | } 85 | } 86 | } 87 | 88 | #[derive(Debug, Serialize, Deserialize)] 89 | #[serde(deny_unknown_fields)] 90 | pub struct RepoConfig { 91 | pub name: String, 92 | 93 | #[serde(default = "worktree_setup_default")] 94 | pub worktree_setup: bool, 95 | 96 | pub remotes: Option>, 97 | } 98 | 99 | impl RepoConfig { 100 | pub fn from_repo(repo: repo::Repo) -> Self { 101 | Self { 102 | name: repo.name, 103 | worktree_setup: repo.worktree_setup, 104 | remotes: repo 105 | .remotes 106 | .map(|remotes| remotes.into_iter().map(RemoteConfig::from_remote).collect()), 107 | } 108 | } 109 | 110 | pub fn into_repo(self) -> repo::Repo { 111 | let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') { 112 | (Some(namespace.to_string()), name.to_string()) 113 | } else { 114 | (None, self.name) 115 | }; 116 | 117 | repo::Repo { 118 | name, 119 | namespace, 120 | worktree_setup: self.worktree_setup, 121 | remotes: self.remotes.map(|remotes| { 122 | remotes 123 | .into_iter() 124 | .map(|remote| remote.into_remote()) 125 | .collect() 126 | }), 127 | } 128 | } 129 | } 130 | 131 | impl ConfigTrees { 132 | pub fn to_config(self) -> Config { 133 | Config::ConfigTrees(self) 134 | } 135 | 136 | pub fn from_vec(vec: Vec) -> Self { 137 | Self { trees: vec } 138 | } 139 | 140 | pub fn from_trees(vec: Vec) -> Self { 141 | Self { 142 | trees: vec.into_iter().map(ConfigTree::from_tree).collect(), 143 | } 144 | } 145 | 146 | pub fn trees(self) -> Vec { 147 | self.trees 148 | } 149 | 150 | pub fn trees_mut(&mut self) -> &mut Vec { 151 | &mut self.trees 152 | } 153 | 154 | pub fn trees_ref(&self) -> &Vec { 155 | self.trees.as_ref() 156 | } 157 | } 158 | 159 | impl Config { 160 | pub fn trees(self) -> Result, String> { 161 | match self { 162 | 
Self::ConfigTrees(config) => Ok(config.trees), 163 | Self::ConfigProvider(config) => { 164 | let token = match auth::get_token_from_command(&config.token_command) { 165 | Ok(token) => token, 166 | Err(error) => { 167 | print_error(&format!("Getting token from command failed: {error}")); 168 | process::exit(1); 169 | } 170 | }; 171 | 172 | let filters = config.filters.unwrap_or(ConfigProviderFilter { 173 | access: Some(false), 174 | owner: Some(false), 175 | users: Some(vec![]), 176 | groups: Some(vec![]), 177 | }); 178 | 179 | let filter = Filter::new( 180 | filters.users.unwrap_or_default(), 181 | filters.groups.unwrap_or_default(), 182 | filters.owner.unwrap_or(false), 183 | filters.access.unwrap_or(false), 184 | ); 185 | 186 | if filter.empty() { 187 | print_warning( 188 | "The configuration does not contain any filters, so no repos will match", 189 | ); 190 | } 191 | 192 | let repos = match config.provider { 193 | RemoteProvider::Github => { 194 | match provider::Github::new(filter, token, config.api_url) { 195 | Ok(provider) => provider, 196 | Err(error) => { 197 | print_error(&format!("Error: {error}")); 198 | process::exit(1); 199 | } 200 | } 201 | .get_repos( 202 | config.worktree.unwrap_or(false), 203 | config.force_ssh.unwrap_or(false), 204 | config.remote_name, 205 | )? 206 | } 207 | RemoteProvider::Gitlab => { 208 | match provider::Gitlab::new(filter, token, config.api_url) { 209 | Ok(provider) => provider, 210 | Err(error) => { 211 | print_error(&format!("Error: {error}")); 212 | process::exit(1); 213 | } 214 | } 215 | .get_repos( 216 | config.worktree.unwrap_or(false), 217 | config.force_ssh.unwrap_or(false), 218 | config.remote_name, 219 | )? 220 | } 221 | }; 222 | 223 | let mut trees = vec![]; 224 | 225 | for (namespace, namespace_repos) in repos { 226 | let repos = namespace_repos 227 | .into_iter() 228 | .map(RepoConfig::from_repo) 229 | .collect(); 230 | let tree = ConfigTree { 231 | root: if let Some(namespace) = namespace { 232 | path::path_as_string(&Path::new(&config.root).join(namespace)) 233 | } else { 234 | path::path_as_string(Path::new(&config.root)) 235 | }, 236 | repos: Some(repos), 237 | }; 238 | trees.push(tree); 239 | } 240 | Ok(trees) 241 | } 242 | } 243 | } 244 | 245 | pub fn from_trees(trees: Vec) -> Self { 246 | Self::ConfigTrees(ConfigTrees { trees }) 247 | } 248 | 249 | pub fn normalize(&mut self) { 250 | if let Self::ConfigTrees(config) = self { 251 | let home = path::env_home(); 252 | for tree in &mut config.trees_mut().iter_mut() { 253 | if tree.root.starts_with(&home) { 254 | // The tilde is not handled differently, it's just a normal path component for `Path`. 255 | // Therefore we can treat it like that during **output**. 
256 | // 257 | // The `unwrap()` is safe here as we are testing via `starts_with()` 258 | // beforehand 259 | let mut path = tree.root.strip_prefix(&home).unwrap(); 260 | if path.starts_with('/') { 261 | path = path.strip_prefix('/').unwrap(); 262 | } 263 | 264 | tree.root = Path::new("~").join(path).display().to_string(); 265 | } 266 | } 267 | } 268 | } 269 | 270 | pub fn as_toml(&self) -> Result<String, String> { 271 | match toml::to_string(self) { 272 | Ok(toml) => Ok(toml), 273 | Err(error) => Err(error.to_string()), 274 | } 275 | } 276 | 277 | pub fn as_yaml(&self) -> Result<String, String> { 278 | serde_yaml::to_string(self).map_err(|e| e.to_string()) 279 | } 280 | } 281 | 282 | #[derive(Debug, Serialize, Deserialize)] 283 | #[serde(deny_unknown_fields)] 284 | pub struct ConfigTree { 285 | pub root: String, 286 | pub repos: Option<Vec<RepoConfig>>, 287 | } 288 | 289 | impl ConfigTree { 290 | pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self { 291 | Self { 292 | root, 293 | repos: Some(repos.into_iter().map(RepoConfig::from_repo).collect()), 294 | } 295 | } 296 | 297 | pub fn from_tree(tree: tree::Tree) -> Self { 298 | Self { 299 | root: tree.root, 300 | repos: Some(tree.repos.into_iter().map(RepoConfig::from_repo).collect()), 301 | } 302 | } 303 | } 304 | 305 | pub fn read_config<'a, T>(path: &str) -> Result<T, String> 306 | where 307 | T: for<'de> serde::Deserialize<'de>, 308 | { 309 | let content = match std::fs::read_to_string(path) { 310 | Ok(s) => s, 311 | Err(e) => { 312 | return Err(format!( 313 | "Error reading configuration file \"{path}\": {}", 314 | match e.kind() { 315 | std::io::ErrorKind::NotFound => String::from("not found"), 316 | _ => e.to_string(), 317 | } 318 | )); 319 | } 320 | }; 321 | 322 | let config: T = match toml::from_str(&content) { 323 | Ok(c) => c, 324 | Err(_) => match serde_yaml::from_str(&content) { 325 | Ok(c) => c, 326 | Err(e) => return Err(format!("Error parsing configuration file \"{path}\": {e}",)), 327 | }, 328 | }; 329 | 330 | Ok(config) 331 | } 332 | -------------------------------------------------------------------------------- /src/grm/cmd.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | 3 | #[derive(Parser)] 4 | #[clap( 5 | name = clap::crate_name!(), 6 | version = clap::crate_version!(), 7 | author = clap::crate_authors!("\n"), 8 | about = clap::crate_description!(), 9 | long_version = clap::crate_version!(), 10 | propagate_version = true, 11 | )] 12 | pub struct Opts { 13 | #[clap(subcommand)] 14 | pub subcmd: SubCommand, 15 | } 16 | 17 | #[derive(Parser)] 18 | pub enum SubCommand { 19 | #[clap(about = "Manage repositories")] 20 | Repos(Repos), 21 | #[clap(visible_alias = "wt", about = "Manage worktrees")] 22 | Worktree(Worktree), 23 | } 24 | 25 | #[derive(Parser)] 26 | pub struct Repos { 27 | #[clap(subcommand, name = "action")] 28 | pub action: ReposAction, 29 | } 30 | 31 | #[derive(Parser)] 32 | pub enum ReposAction { 33 | #[clap(subcommand)] 34 | Sync(SyncAction), 35 | #[clap(subcommand)] 36 | Find(FindAction), 37 | #[clap(about = "Show status of configured repositories")] 38 | Status(OptionalConfig), 39 | } 40 | 41 | #[derive(Parser)] 42 | #[clap(about = "Sync local repositories with a configured list")] 43 | pub enum SyncAction { 44 | #[clap(about = "Synchronize the repositories to the configured values")] 45 | Config(Config), 46 | #[clap(about = "Synchronize the repositories from a remote provider")] 47 | Remote(SyncRemoteArgs), 48 | } 49 | 50 | #[derive(Parser)] 51 | #[clap(about = "Generate a repository configuration from
existing repositories")] 52 | pub enum FindAction { 53 | #[clap(about = "Find local repositories")] 54 | Local(FindLocalArgs), 55 | #[clap(about = "Find repositories on remote provider")] 56 | Remote(FindRemoteArgs), 57 | #[clap(about = "Find repositories as defined in the configuration file")] 58 | Config(FindConfigArgs), 59 | } 60 | 61 | #[derive(Parser)] 62 | pub struct FindLocalArgs { 63 | #[clap(help = "The path to search through")] 64 | pub path: String, 65 | 66 | #[clap( 67 | short, 68 | long, 69 | help = "Exclude repositories that match the given regex", 70 | name = "REGEX" 71 | )] 72 | pub exclude: Option<String>, 73 | 74 | #[clap( 75 | value_enum, 76 | short, 77 | long, 78 | help = "Format to produce", 79 | default_value_t = ConfigFormat::Toml, 80 | )] 81 | pub format: ConfigFormat, 82 | } 83 | 84 | #[derive(Parser)] 85 | pub struct FindConfigArgs { 86 | #[clap( 87 | short, 88 | long, 89 | default_value = "./config.toml", 90 | help = "Path to the configuration file" 91 | )] 92 | pub config: String, 93 | 94 | #[clap( 95 | value_enum, 96 | short, 97 | long, 98 | help = "Format to produce", 99 | default_value_t = ConfigFormat::Toml, 100 | )] 101 | pub format: ConfigFormat, 102 | } 103 | 104 | #[derive(Parser)] 105 | #[clap()] 106 | pub struct FindRemoteArgs { 107 | #[clap(short, long, help = "Path to the configuration file")] 108 | pub config: Option<String>, 109 | 110 | #[clap(value_enum, short, long, help = "Remote provider to use")] 111 | pub provider: RemoteProvider, 112 | 113 | #[clap(short, long, help = "Name of the remote to use")] 114 | pub remote_name: Option<String>, 115 | 116 | #[clap( 117 | action = clap::ArgAction::Append, 118 | name = "user", 119 | long, 120 | help = "Users to get repositories from" 121 | )] 122 | pub users: Vec<String>, 123 | 124 | #[clap( 125 | action = clap::ArgAction::Append, 126 | name = "group", 127 | long, 128 | help = "Groups to get repositories from" 129 | )] 130 | pub groups: Vec<String>, 131 | 132 | #[clap(long, help = "Get repositories that belong to the requesting user")] 133 | pub owner: bool, 134 | 135 | #[clap(long, help = "Get repositories that the requesting user has access to")] 136 | pub access: bool, 137 | 138 | #[clap(long, help = "Always use SSH, even for public repositories")] 139 | pub force_ssh: bool, 140 | 141 | #[clap(long, help = "Command to get API token")] 142 | pub token_command: String, 143 | 144 | #[clap(long, help = "Root of the repo tree to produce")] 145 | pub root: String, 146 | 147 | #[clap( 148 | value_enum, 149 | short, 150 | long, 151 | help = "Format to produce", 152 | default_value_t = ConfigFormat::Toml, 153 | )] 154 | pub format: ConfigFormat, 155 | 156 | #[clap( 157 | long, 158 | help = "Use worktree setup for repositories", 159 | value_parser = ["true", "false"], 160 | default_value = "false", 161 | default_missing_value = "true", 162 | num_args = 0..=1, 163 | )] 164 | pub worktree: String, 165 | 166 | #[clap(long, help = "Base URL for the API")] 167 | pub api_url: Option<String>, 168 | } 169 | 170 | #[derive(Parser)] 171 | #[clap()] 172 | pub struct Config { 173 | #[clap( 174 | short, 175 | long, 176 | default_value = "./config.toml", 177 | help = "Path to the configuration file" 178 | )] 179 | pub config: String, 180 | 181 | #[clap( 182 | long, 183 | value_parser = ["true", "false"], 184 | help = "Check out the default worktree after clone", 185 | default_value = "true", 186 | default_missing_value = "true", 187 | num_args = 0..=1, 188 | )] 189 | pub init_worktree: String, 190 | } 191 | 192 | pub type RemoteProvider = super::provider::RemoteProvider;
193 | 194 | #[derive(Parser)] 195 | #[clap()] 196 | pub struct SyncRemoteArgs { 197 | #[clap(value_enum, short, long, help = "Remote provider to use")] 198 | pub provider: RemoteProvider, 199 | 200 | #[clap(short, long, help = "Name of the remote to use")] 201 | pub remote_name: Option<String>, 202 | 203 | #[clap( 204 | action = clap::ArgAction::Append, 205 | name = "user", 206 | long, 207 | help = "Users to get repositories from" 208 | )] 209 | pub users: Vec<String>, 210 | 211 | #[clap( 212 | action = clap::ArgAction::Append, 213 | name = "group", 214 | long, 215 | help = "Groups to get repositories from" 216 | )] 217 | pub groups: Vec<String>, 218 | 219 | #[clap(long, help = "Get repositories that belong to the requesting user")] 220 | pub owner: bool, 221 | 222 | #[clap(long, help = "Get repositories that the requesting user has access to")] 223 | pub access: bool, 224 | 225 | #[clap(long, help = "Always use SSH, even for public repositories")] 226 | pub force_ssh: bool, 227 | 228 | #[clap(long, help = "Command to get API token")] 229 | pub token_command: String, 230 | 231 | #[clap(long, help = "Root of the repo tree to produce")] 232 | pub root: String, 233 | 234 | #[clap( 235 | long, 236 | help = "Use worktree setup for repositories", 237 | value_parser = ["true", "false"], 238 | default_value = "false", 239 | default_missing_value = "true", 240 | num_args = 0..=1, 241 | )] 242 | pub worktree: String, 243 | 244 | #[clap(long, help = "Base URL for the API")] 245 | pub api_url: Option<String>, 246 | 247 | #[clap( 248 | long, 249 | help = "Check out the default worktree after clone", 250 | value_parser = ["true", "false"], 251 | default_value = "true", 252 | default_missing_value = "true", 253 | num_args = 0..=1, 254 | )] 255 | pub init_worktree: String, 256 | } 257 | 258 | #[derive(Parser)] 259 | #[clap()] 260 | pub struct OptionalConfig { 261 | #[clap(short, long, help = "Path to the configuration file")] 262 | pub config: Option<String>, 263 | } 264 | 265 | #[derive(clap::ValueEnum, Clone)] 266 | pub enum ConfigFormat { 267 | Yaml, 268 | Toml, 269 | } 270 | 271 | #[derive(Parser)] 272 | pub struct Worktree { 273 | #[clap(subcommand, name = "action")] 274 | pub action: WorktreeAction, 275 | } 276 | 277 | #[derive(Parser)] 278 | pub enum WorktreeAction { 279 | #[clap(about = "Add a new worktree")] 280 | Add(WorktreeAddArgs), 281 | #[clap(about = "Delete an existing worktree")] 282 | Delete(WorktreeDeleteArgs), 283 | #[clap(about = "Show state of existing worktrees")] 284 | Status(WorktreeStatusArgs), 285 | #[clap(about = "Convert a normal repository to a worktree setup")] 286 | Convert(WorktreeConvertArgs), 287 | #[clap(about = "Clean all worktrees that do not contain uncommitted/unpushed changes")] 288 | Clean(WorktreeCleanArgs), 289 | #[clap(about = "Fetch refs from remotes")] 290 | Fetch(WorktreeFetchArgs), 291 | #[clap(about = "Fetch refs from remotes and update local branches")] 292 | Pull(WorktreePullArgs), 293 | #[clap(about = "Rebase worktree onto default branch")] 294 | Rebase(WorktreeRebaseArgs), 295 | } 296 | 297 | #[derive(Parser)] 298 | pub struct WorktreeAddArgs { 299 | #[clap(help = "Name of the worktree")] 300 | pub name: String, 301 | 302 | #[clap(short = 't', long = "track", help = "Remote branch to track")] 303 | pub track: Option<String>, 304 | 305 | #[clap(long = "no-track", help = "Disable tracking")] 306 | pub no_track: bool, 307 | } 308 | #[derive(Parser)] 309 | pub struct WorktreeDeleteArgs { 310 | #[clap(help = "Name of the worktree")] 311 | pub name: String, 312 | 313 | #[clap( 314 | long = "force", 315 | help
= "Force deletion, even when there are uncommitted/unpushed changes" 316 | )] 317 | pub force: bool, 318 | } 319 | 320 | #[derive(Parser)] 321 | pub struct WorktreeStatusArgs {} 322 | 323 | #[derive(Parser)] 324 | pub struct WorktreeConvertArgs {} 325 | 326 | #[derive(Parser)] 327 | pub struct WorktreeCleanArgs {} 328 | 329 | #[derive(Parser)] 330 | pub struct WorktreeFetchArgs {} 331 | 332 | #[derive(Parser)] 333 | pub struct WorktreePullArgs { 334 | #[clap(long = "rebase", help = "Perform a rebase instead of a fast-forward")] 335 | pub rebase: bool, 336 | #[clap(long = "stash", help = "Stash & unstash changes before & after pull")] 337 | pub stash: bool, 338 | } 339 | 340 | #[derive(Parser)] 341 | pub struct WorktreeRebaseArgs { 342 | #[clap(long = "pull", help = "Perform a pull before rebasing")] 343 | pub pull: bool, 344 | #[clap(long = "rebase", help = "Perform a rebase when doing a pull")] 345 | pub rebase: bool, 346 | #[clap(long = "stash", help = "Stash & unstash changes before & after rebase")] 347 | pub stash: bool, 348 | } 349 | 350 | pub fn parse() -> Opts { 351 | Opts::parse() 352 | } 353 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | 3 | use std::path::Path; 4 | 5 | pub mod auth; 6 | pub mod config; 7 | pub mod output; 8 | pub mod path; 9 | pub mod provider; 10 | pub mod repo; 11 | pub mod table; 12 | pub mod tree; 13 | pub mod worktree; 14 | 15 | /// Find all git repositories under root, recursively 16 | /// 17 | /// The bool in the return value specifies whether there is a repository 18 | /// in root itself. 19 | #[allow(clippy::type_complexity)] 20 | fn find_repos( 21 | root: &Path, 22 | exclusion_pattern: Option<&str>, 23 | ) -> Result, Vec, bool)>, String> { 24 | let mut repos: Vec = Vec::new(); 25 | let mut repo_in_root = false; 26 | let mut warnings = Vec::new(); 27 | 28 | let exlusion_regex: regex::Regex = regex::Regex::new(exclusion_pattern.unwrap_or(r"^$")) 29 | .map_err(|e| format!("invalid regex: {e}"))?; 30 | for path in tree::find_repo_paths(root)? { 31 | if exclusion_pattern.is_some() && exlusion_regex.is_match(&path::path_as_string(&path)) { 32 | warnings.push(format!("[skipped] {}", &path::path_as_string(&path))); 33 | continue; 34 | } 35 | 36 | let is_worktree = repo::RepoHandle::detect_worktree(&path); 37 | if path == root { 38 | repo_in_root = true; 39 | } 40 | 41 | match repo::RepoHandle::open(&path, is_worktree) { 42 | Err(error) => { 43 | warnings.push(format!( 44 | "Error opening repo {}{}: {}", 45 | path.display(), 46 | if is_worktree { " as worktree" } else { "" }, 47 | error 48 | )); 49 | continue; 50 | } 51 | Ok(repo) => { 52 | let remotes = match repo.remotes() { 53 | Ok(remote) => remote, 54 | Err(error) => { 55 | warnings.push(format!( 56 | "{}: Error getting remotes: {}", 57 | &path::path_as_string(&path), 58 | error 59 | )); 60 | continue; 61 | } 62 | }; 63 | 64 | let mut results: Vec = Vec::new(); 65 | for remote_name in remotes { 66 | match repo.find_remote(&remote_name)? { 67 | Some(remote) => { 68 | let name = remote.name(); 69 | let url = remote.url(); 70 | let remote_type = match repo::detect_remote_type(&url) { 71 | Ok(t) => t, 72 | Err(e) => { 73 | warnings.push(format!( 74 | "{}: Could not handle URL {}. 
Reason: {}", 75 | &path::path_as_string(&path), 76 | &url, 77 | e 78 | )); 79 | continue; 80 | } 81 | }; 82 | 83 | results.push(repo::Remote { 84 | name, 85 | url, 86 | remote_type, 87 | }); 88 | } 89 | None => { 90 | warnings.push(format!( 91 | "{}: Remote {} not found", 92 | &path::path_as_string(&path), 93 | remote_name 94 | )); 95 | continue; 96 | } 97 | }; 98 | } 99 | let remotes = results; 100 | 101 | let (namespace, name) = if path == root { 102 | ( 103 | None, 104 | match &root.parent() { 105 | Some(parent) => { 106 | path::path_as_string(path.strip_prefix(parent).unwrap()) 107 | } 108 | None => { 109 | warnings.push(String::from("Getting name of the search root failed. Do you have a git repository in \"/\"?")); 110 | continue; 111 | } 112 | }, 113 | ) 114 | } else { 115 | let name = path.strip_prefix(root).unwrap(); 116 | let namespace = name.parent().unwrap(); 117 | ( 118 | if namespace == Path::new("") { 119 | None 120 | } else { 121 | Some(path::path_as_string(namespace).to_string()) 122 | }, 123 | path::path_as_string(name), 124 | ) 125 | }; 126 | 127 | repos.push(repo::Repo { 128 | name, 129 | namespace, 130 | remotes: Some(remotes), 131 | worktree_setup: is_worktree, 132 | }); 133 | } 134 | } 135 | } 136 | Ok(Some((repos, warnings, repo_in_root))) 137 | } 138 | 139 | pub fn find_in_tree( 140 | path: &Path, 141 | exclusion_pattern: Option<&str>, 142 | ) -> Result<(tree::Tree, Vec<String>), String> { 143 | let mut warnings = Vec::new(); 144 | 145 | let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path, exclusion_pattern)? 146 | { 147 | Some((vec, mut repo_warnings, repo_in_root)) => { 148 | warnings.append(&mut repo_warnings); 149 | (vec, repo_in_root) 150 | } 151 | None => (Vec::new(), false), 152 | }; 153 | 154 | let mut root = path.to_path_buf(); 155 | if repo_in_root { 156 | root = match root.parent() { 157 | Some(root) => root.to_path_buf(), 158 | None => { 159 | return Err(String::from( 160 | "Cannot detect root directory.
Are you working in /?", 161 | )); 162 | } 163 | } 164 | } 165 | 166 | Ok(( 167 | tree::Tree { 168 | root: root.into_os_string().into_string().unwrap(), 169 | repos, 170 | }, 171 | warnings, 172 | )) 173 | } 174 | -------------------------------------------------------------------------------- /src/output.rs: -------------------------------------------------------------------------------- 1 | use console::{Style, Term}; 2 | 3 | pub fn print_repo_error(repo: &str, message: &str) { 4 | print_error(&format!("{repo}: {message}")); 5 | } 6 | 7 | pub fn print_error(message: &str) { 8 | let stderr = Term::stderr(); 9 | let mut style = Style::new().red(); 10 | if stderr.is_term() { 11 | style = style.force_styling(true); 12 | } 13 | stderr 14 | .write_line(&format!("[{}] {}", style.apply_to('\u{2718}'), &message)) 15 | .unwrap(); 16 | } 17 | 18 | pub fn print_repo_action(repo: &str, message: &str) { 19 | print_action(&format!("{repo}: {message}")); 20 | } 21 | 22 | pub fn print_action(message: &str) { 23 | let stdout = Term::stdout(); 24 | let mut style = Style::new().yellow(); 25 | if stdout.is_term() { 26 | style = style.force_styling(true); 27 | } 28 | stdout 29 | .write_line(&format!("[{}] {}", style.apply_to('\u{2699}'), &message)) 30 | .unwrap(); 31 | } 32 | 33 | pub fn print_warning(message: &str) { 34 | let stderr = Term::stderr(); 35 | let mut style = Style::new().yellow(); 36 | if stderr.is_term() { 37 | style = style.force_styling(true); 38 | } 39 | stderr 40 | .write_line(&format!("[{}] {}", style.apply_to('!'), &message)) 41 | .unwrap(); 42 | } 43 | 44 | pub fn print_repo_success(repo: &str, message: &str) { 45 | print_success(&format!("{repo}: {message}")); 46 | } 47 | 48 | pub fn print_success(message: &str) { 49 | let stdout = Term::stdout(); 50 | let mut style = Style::new().green(); 51 | if stdout.is_term() { 52 | style = style.force_styling(true); 53 | } 54 | 55 | stdout 56 | .write_line(&format!("[{}] {}", style.apply_to('\u{2714}'), &message)) 57 | .unwrap(); 58 | } 59 | -------------------------------------------------------------------------------- /src/path.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | use std::process; 3 | 4 | use super::output::*; 5 | 6 | pub fn path_as_string(path: &Path) -> String { 7 | path.to_path_buf().into_os_string().into_string().unwrap() 8 | } 9 | 10 | pub fn env_home() -> String { 11 | match std::env::var("HOME") { 12 | Ok(path) => path, 13 | Err(error) => { 14 | print_error(&format!("Unable to read HOME: {error}")); 15 | process::exit(1); 16 | } 17 | } 18 | } 19 | 20 | pub fn expand_path(path: &Path) -> PathBuf { 21 | let expanded_path = match shellexpand::full_with_context( 22 | &path_as_string(path), 23 | || Some(env_home()), 24 | |name| -> Result<Option<String>, &'static str> { 25 | match name { 26 | "HOME" => Ok(Some(env_home())), 27 | _ => Ok(None), 28 | } 29 | }, 30 | ) { 31 | Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(), 32 | Ok(std::borrow::Cow::Owned(path)) => path, 33 | Err(error) => { 34 | print_error(&format!("Unable to expand root: {error}")); 35 | process::exit(1); 36 | } 37 | }; 38 | 39 | Path::new(&expanded_path).to_path_buf() 40 | } 41 | 42 | #[cfg(test)] 43 | mod tests { 44 | use super::*; 45 | 46 | fn setup() { 47 | std::env::set_var("HOME", "/home/test"); 48 | } 49 | 50 | #[test] 51 | fn check_expand_tilde() { 52 | setup(); 53 | assert_eq!( 54 | expand_path(Path::new("~/file")), 55 | Path::new("/home/test/file") 56 | ); 57 | } 58 | 59 | #[test] 60 | fn
check_expand_invalid_tilde() { 61 | setup(); 62 | assert_eq!( 63 | expand_path(Path::new("/home/~/file")), 64 | Path::new("/home/~/file") 65 | ); 66 | } 67 | 68 | #[test] 69 | fn check_expand_home() { 70 | setup(); 71 | assert_eq!( 72 | expand_path(Path::new("$HOME/file")), 73 | Path::new("/home/test/file") 74 | ); 75 | assert_eq!( 76 | expand_path(Path::new("${HOME}/file")), 77 | Path::new("/home/test/file") 78 | ); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/provider/github.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | use super::auth; 4 | use super::escape; 5 | use super::ApiErrorResponse; 6 | use super::Filter; 7 | use super::JsonError; 8 | use super::Project; 9 | use super::Provider; 10 | 11 | const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json"; 12 | const GITHUB_API_BASEURL: &str = match option_env!("GITHUB_API_BASEURL") { 13 | Some(url) => url, 14 | None => "https://api.github.com", 15 | }; 16 | 17 | #[derive(Deserialize)] 18 | pub struct GithubProject { 19 | pub name: String, 20 | pub full_name: String, 21 | pub clone_url: String, 22 | pub ssh_url: String, 23 | pub private: bool, 24 | } 25 | 26 | #[derive(Deserialize)] 27 | struct GithubUser { 28 | #[serde(rename = "login")] 29 | pub username: String, 30 | } 31 | 32 | impl Project for GithubProject { 33 | fn name(&self) -> String { 34 | self.name.clone() 35 | } 36 | 37 | fn namespace(&self) -> Option<String> { 38 | if let Some((namespace, _name)) = self.full_name.rsplit_once('/') { 39 | Some(namespace.to_string()) 40 | } else { 41 | None 42 | } 43 | } 44 | 45 | fn ssh_url(&self) -> String { 46 | self.ssh_url.clone() 47 | } 48 | 49 | fn http_url(&self) -> String { 50 | self.clone_url.clone() 51 | } 52 | 53 | fn private(&self) -> bool { 54 | self.private 55 | } 56 | } 57 | 58 | #[derive(Deserialize)] 59 | pub struct GithubApiErrorResponse { 60 | pub message: String, 61 | } 62 | 63 | impl JsonError for GithubApiErrorResponse { 64 | fn to_string(self) -> String { 65 | self.message 66 | } 67 | } 68 | 69 | pub struct Github { 70 | filter: Filter, 71 | secret_token: auth::AuthToken, 72 | } 73 | 74 | impl Provider for Github { 75 | type Project = GithubProject; 76 | type Error = GithubApiErrorResponse; 77 | 78 | fn new( 79 | filter: Filter, 80 | secret_token: auth::AuthToken, 81 | api_url_override: Option<String>, 82 | ) -> Result<Self, String> { 83 | if api_url_override.is_some() { 84 | return Err("API URL overriding is not supported for Github".to_string()); 85 | } 86 | Ok(Self { 87 | filter, 88 | secret_token, 89 | }) 90 | } 91 | 92 | fn filter(&self) -> &Filter { 93 | &self.filter 94 | } 95 | 96 | fn secret_token(&self) -> &auth::AuthToken { 97 | &self.secret_token 98 | } 99 | 100 | fn auth_header_key() -> &'static str { 101 | "token" 102 | } 103 | 104 | fn get_user_projects( 105 | &self, 106 | user: &str, 107 | ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> { 108 | self.call_list( 109 | &format!("{GITHUB_API_BASEURL}/users/{}/repos", escape(user)), 110 | Some(ACCEPT_HEADER_JSON), 111 | ) 112 | } 113 | 114 | fn get_group_projects( 115 | &self, 116 | group: &str, 117 | ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> { 118 | self.call_list( 119 | &format!("{GITHUB_API_BASEURL}/orgs/{}/repos?type=all", escape(group)), 120 | Some(ACCEPT_HEADER_JSON), 121 | ) 122 | } 123 | 124 | fn get_accessible_projects( 125 | &self, 126 | ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> { 127 | self.call_list( 128 | &format!("{GITHUB_API_BASEURL}/user/repos"), 129 |
Some(ACCEPT_HEADER_JSON), 130 | ) 131 | } 132 | 133 | fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> { 134 | Ok(super::call::<GithubUser, GithubApiErrorResponse>( 135 | &format!("{GITHUB_API_BASEURL}/user"), 136 | Self::auth_header_key(), 137 | self.secret_token(), 138 | Some(ACCEPT_HEADER_JSON), 139 | )? 140 | .username) 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/provider/gitlab.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | use super::auth; 4 | use super::escape; 5 | use super::ApiErrorResponse; 6 | use super::Filter; 7 | use super::JsonError; 8 | use super::Project; 9 | use super::Provider; 10 | 11 | const ACCEPT_HEADER_JSON: &str = "application/json"; 12 | const GITLAB_API_BASEURL: &str = match option_env!("GITLAB_API_BASEURL") { 13 | Some(url) => url, 14 | None => "https://gitlab.com", 15 | }; 16 | 17 | #[derive(Deserialize)] 18 | #[serde(rename_all = "lowercase")] 19 | pub enum GitlabVisibility { 20 | Private, 21 | Internal, 22 | Public, 23 | } 24 | 25 | #[derive(Deserialize)] 26 | pub struct GitlabProject { 27 | #[serde(rename = "path")] 28 | pub name: String, 29 | pub path_with_namespace: String, 30 | pub http_url_to_repo: String, 31 | pub ssh_url_to_repo: String, 32 | pub visibility: GitlabVisibility, 33 | } 34 | 35 | #[derive(Deserialize)] 36 | struct GitlabUser { 37 | pub username: String, 38 | } 39 | 40 | impl Project for GitlabProject { 41 | fn name(&self) -> String { 42 | self.name.clone() 43 | } 44 | 45 | fn namespace(&self) -> Option<String> { 46 | if let Some((namespace, _name)) = self.path_with_namespace.rsplit_once('/') { 47 | Some(namespace.to_string()) 48 | } else { 49 | None 50 | } 51 | } 52 | 53 | fn ssh_url(&self) -> String { 54 | self.ssh_url_to_repo.clone() 55 | } 56 | 57 | fn http_url(&self) -> String { 58 | self.http_url_to_repo.clone() 59 | } 60 | 61 | fn private(&self) -> bool { 62 | !matches!(self.visibility, GitlabVisibility::Public) 63 | } 64 | } 65 | 66 | #[derive(Deserialize)] 67 | pub struct GitlabApiErrorResponse { 68 | #[serde(alias = "error_description", alias = "error")] 69 | pub message: String, 70 | } 71 | 72 | impl JsonError for GitlabApiErrorResponse { 73 | fn to_string(self) -> String { 74 | self.message 75 | } 76 | } 77 | 78 | pub struct Gitlab { 79 | filter: Filter, 80 | secret_token: auth::AuthToken, 81 | api_url_override: Option<String>, 82 | } 83 | 84 | impl Gitlab { 85 | fn api_url(&self) -> String { 86 | match self.api_url_override { 87 | Some(ref s) => s.trim_end_matches('/').to_string(), 88 | None => GITLAB_API_BASEURL.to_string(), 89 | } 90 | } 91 | } 92 | 93 | impl Provider for Gitlab { 94 | type Project = GitlabProject; 95 | type Error = GitlabApiErrorResponse; 96 | 97 | fn new( 98 | filter: Filter, 99 | secret_token: auth::AuthToken, 100 | api_url_override: Option<String>, 101 | ) -> Result<Self, String> { 102 | Ok(Self { 103 | filter, 104 | secret_token, 105 | api_url_override, 106 | }) 107 | } 108 | 109 | fn filter(&self) -> &Filter { 110 | &self.filter 111 | } 112 | 113 | fn secret_token(&self) -> &auth::AuthToken { 114 | &self.secret_token 115 | } 116 | 117 | fn auth_header_key() -> &'static str { 118 | "bearer" 119 | } 120 | 121 | fn get_user_projects( 122 | &self, 123 | user: &str, 124 | ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> { 125 | self.call_list( 126 | &format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)), 127 | Some(ACCEPT_HEADER_JSON), 128 | ) 129 | } 130 | 131 | fn get_group_projects( 132 | &self, 133 | group: &str, 134 | ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> { 135
| self.call_list( 136 | &format!( 137 | "{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false", 138 | self.api_url(), 139 | escape(group), 140 | ), 141 | Some(ACCEPT_HEADER_JSON), 142 | ) 143 | } 144 | 145 | fn get_accessible_projects( 146 | &self, 147 | ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> { 148 | self.call_list( 149 | &format!("{}/api/v4/projects", self.api_url(),), 150 | Some(ACCEPT_HEADER_JSON), 151 | ) 152 | } 153 | 154 | fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> { 155 | Ok(super::call::<GitlabUser, GitlabApiErrorResponse>( 156 | &format!("{}/api/v4/user", self.api_url()), 157 | Self::auth_header_key(), 158 | self.secret_token(), 159 | Some(ACCEPT_HEADER_JSON), 160 | )? 161 | .username) 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /tests/helpers.rs: -------------------------------------------------------------------------------- 1 | use outdir_tempdir::TempDir; 2 | 3 | pub fn init_tmpdir() -> TempDir { 4 | let tmp_dir = TempDir::new().autorm(); 5 | println!("Temporary directory: {}", tmp_dir.path().display()); 6 | tmp_dir 7 | } 8 | 9 | pub fn cleanup_tmpdir(_tempdir: TempDir) { 10 | // cleanup on drop 11 | } 12 | -------------------------------------------------------------------------------- /tests/repo.rs: -------------------------------------------------------------------------------- 1 | use grm::repo::*; 2 | 3 | mod helpers; 4 | 5 | use helpers::*; 6 | 7 | #[test] 8 | fn open_empty_repo() { 9 | let tmpdir = init_tmpdir(); 10 | assert!(matches!( 11 | RepoHandle::open(tmpdir.path(), true), 12 | Err(RepoError { 13 | kind: RepoErrorKind::NotFound 14 | }) 15 | )); 16 | assert!(matches!( 17 | RepoHandle::open(tmpdir.path(), false), 18 | Err(RepoError { 19 | kind: RepoErrorKind::NotFound 20 | }) 21 | )); 22 | cleanup_tmpdir(tmpdir); 23 | } 24 | 25 | #[test] 26 | fn create_repo() -> Result<(), Box<dyn std::error::Error>> { 27 | let tmpdir = init_tmpdir(); 28 | let repo = RepoHandle::init(tmpdir.path(), false)?; 29 | assert!(!repo.is_bare()); 30 | assert!(repo.is_empty()?); 31 | cleanup_tmpdir(tmpdir); 32 | Ok(()) 33 | } 34 | 35 | #[test] 36 | fn create_repo_with_worktree() -> Result<(), Box<dyn std::error::Error>> { 37 | let tmpdir = init_tmpdir(); 38 | let repo = RepoHandle::init(tmpdir.path(), true)?; 39 | assert!(repo.is_bare()); 40 | assert!(repo.is_empty()?); 41 | cleanup_tmpdir(tmpdir); 42 | Ok(()) 43 | } 44 | -------------------------------------------------------------------------------- /update-cargo-dependencies.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import subprocess 4 | 5 | import tomlkit 6 | 7 | with open("./Cargo.toml", "r") as cargo_config: 8 | cargo = tomlkit.parse(cargo_config.read()) 9 | 10 | update_necessary = False 11 | 12 | for tier in ["dependencies", "dev-dependencies"]: 13 | for name, dependency in cargo[tier].items(): 14 | version = dependency["version"].lstrip("=") 15 | 16 | args = [ 17 | "cargo", 18 | "upgrade", 19 | "--incompatible", 20 | "--pinned", 21 | "--ignore-rust-version", 22 | "--package", 23 | name, 24 | ] 25 | subprocess.run( 26 | args, 27 | check=True, 28 | ) 29 | 30 | with open("./Cargo.toml", "r") as cargo_config: 31 | cargo = tomlkit.parse(cargo_config.read()) 32 | 33 | new_version = {dep: cfg for dep, cfg in cargo[tier].items() if dep == name}[ 34 | name 35 | ]["version"].lstrip("=") 36 | 37 | subprocess.run( 38 | ["cargo", "update", "--recursive", "--package", name], 39 | check=True, 40 | ) 41 | 42 | if version != new_version: 43 | update_necessary = True 44 | 45 | message = f"dep:
Update {name} to {new_version}" 46 | 47 | cmd = subprocess.run( 48 | [ 49 | "git", 50 | "commit", 51 | "--message", 52 | message, 53 | "./Cargo.lock", 54 | "./Cargo.toml", 55 | ], 56 | check=True, 57 | ) 58 | 59 | # If only Cargo.lock changed but not the version of the dependency itself, 60 | # some transitive dependencies were updated 61 | else: 62 | cmd = subprocess.run( 63 | [ 64 | "git", 65 | "diff", 66 | "--stat", 67 | "--exit-code", 68 | "./Cargo.lock", 69 | ], 70 | ) 71 | 72 | if cmd.returncode == 1: 73 | message = f"dep: Update dependencies of {name}" 74 | 75 | cmd = subprocess.run( 76 | [ 77 | "git", 78 | "commit", 79 | "--message", 80 | message, 81 | "./Cargo.lock", 82 | ], 83 | check=True, 84 | ) 85 | 86 | # assert that Cargo.toml is not modified 87 | subprocess.run( 88 | [ 89 | "git", 90 | "diff", 91 | "--stat", 92 | "--exit-code", 93 | "./Cargo.toml", 94 | ], 95 | check=True, 96 | ) 97 | 98 | 99 | if update_necessary is False: 100 | print("Everything up to date") 101 | -------------------------------------------------------------------------------- /update-pkgbuild.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if ! git remote | grep -q ^aur$; then 4 | git remote add aur ssh://aur@aur.archlinux.org/grm-git.git 5 | fi 6 | 7 | git subtree push --prefix pkg/arch/ aur master 8 | 9 | git remote rm aur 10 | --------------------------------------------------------------------------------
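
A minimal usage sketch for the configuration module shown above in src/config.rs. This is hypothetical and not a file from the repository; it assumes grm is available as a library dependency (the crate exposes a `grm` lib target per Cargo.toml), and the config path and error handling are illustrative only.

// Hypothetical sketch: load a configuration and list its trees.
// Assumes a `grm` library dependency; "./config.toml" is a placeholder path.
use grm::config::{read_config, Config};

fn main() -> Result<(), String> {
    // read_config tries TOML first and falls back to YAML on parse failure.
    let config: Config = read_config("./config.toml")?;

    // For a provider-based configuration this resolves the repository list via
    // the GitHub/GitLab API (using the configured token_command); for a
    // tree-based configuration it simply returns the configured trees.
    let trees = config.trees()?;

    for tree in &trees {
        println!(
            "root: {} ({} repos)",
            tree.root,
            tree.repos.as_ref().map_or(0, |repos| repos.len())
        );
    }
    Ok(())
}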