├── .codespell-ignore-words
├── .codespellrc
├── .containerignore
├── .dockerignore
├── .github
├── CODEOWNERS
├── FUNDING.yml
├── dependabot.yml
└── workflows
│ ├── containers.yaml
│ ├── lint.yaml
│ ├── python-package.yml
│ ├── rust.yml
│ └── sql.yaml
├── .gitignore
├── .testr.conf
├── .yamllint
├── AUTHORS
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Cargo.lock
├── Cargo.toml
├── Dockerfile_archive
├── Dockerfile_auto_upload
├── Dockerfile_bzr_store
├── Dockerfile_differ
├── Dockerfile_git_store
├── Dockerfile_mail_filter
├── Dockerfile_ognibuild_dep
├── Dockerfile_publish
├── Dockerfile_runner
├── Dockerfile_site
├── Dockerfile_worker
├── LICENSE
├── Makefile
├── README.md
├── TODO
├── TODO.rust
├── archive
├── Cargo.toml
├── src
│ ├── lib.rs
│ └── scanner.rs
└── tests
│ └── data
│ ├── hello_2.10-3.debian.tar.xz
│ ├── hello_2.10-3.dsc
│ ├── hello_2.10-3_amd64.deb
│ ├── hello_2.10.orig.tar.gz
│ └── hello_2.10.orig.tar.gz.asc
├── auto-upload
├── Cargo.toml
└── src
│ └── lib.rs
├── autopkgtest-wrapper
├── build.rs
├── bzr-store
├── Cargo.toml
└── src
│ └── lib.rs
├── common-py
├── Cargo.toml
└── src
│ ├── artifacts.rs
│ ├── config.rs
│ ├── debdiff.rs
│ ├── io.rs
│ ├── lib.rs
│ ├── logs.rs
│ └── vcs.rs
├── create-sbuild-chroot-schroot.py
├── create-sbuild-chroot-unshare.py
├── devnotes
├── adding-a-new-campaign.rst
├── branch-names.rst
├── glossary.rst
└── overview.rst
├── differ-py
├── Cargo.toml
└── src
│ └── lib.rs
├── differ
├── Cargo.toml
└── src
│ ├── diffoscope.rs
│ ├── lib.rs
│ └── main.rs
├── docs
├── Dockerfiles_.md
├── flow.md
├── glossary.md
├── production.md
└── structure.md
├── examples
└── janitor.rules
├── git-store
├── Cargo.toml
└── src
│ ├── lib.rs
│ └── main.rs
├── helpers
├── cleanup-repositories.py
├── migrate-logs.py
└── render-publish-template.py
├── janitor.conf.example
├── mail-filter
├── Cargo.toml
├── src
│ ├── bin
│ │ └── janitor-mail-filter.rs
│ ├── lib.rs
│ └── tests.rs
└── tests
│ └── data
│ ├── github-merged-email.txt
│ └── gitlab-merged-email.txt
├── publish-py
├── Cargo.toml
└── src
│ └── lib.rs
├── publish.sh
├── publish
├── Cargo.toml
└── src
│ ├── bin
│ ├── janitor-publish.rs
│ └── publish-one.rs
│ ├── lib.rs
│ ├── proposal_info.rs
│ ├── publish_one.rs
│ ├── rate_limiter.rs
│ ├── state.rs
│ └── web.rs
├── pull_worker.sh
├── py
└── janitor
│ ├── __init__.py
│ ├── _common.pyi
│ ├── _launchpad.py
│ ├── _publish.pyi
│ ├── _runner.pyi
│ ├── _site.pyi
│ ├── artifacts.py
│ ├── bzr_store.py
│ ├── config.proto
│ ├── config.py
│ ├── debian
│ ├── __init__.py
│ ├── archive.py
│ ├── auto_upload.py
│ ├── debdiff.py
│ └── debian.sql
│ ├── differ.py
│ ├── diffoscope.py
│ ├── git_store.py
│ ├── logs.py
│ ├── publish.py
│ ├── py.typed
│ ├── queue.py
│ ├── review.py
│ ├── runner.py
│ ├── schedule.py
│ ├── site
│ ├── __init__.py
│ ├── _static
│ │ ├── alabaster.css
│ │ ├── datatables.css
│ │ ├── file.png
│ │ ├── janitor.css
│ │ ├── janitor.js
│ │ ├── lintian.css
│ │ ├── pygments.css
│ │ └── typeahead.css
│ ├── api.py
│ ├── common.py
│ ├── cupboard
│ │ ├── __init__.py
│ │ ├── api.py
│ │ ├── merge_proposals.py
│ │ ├── publish.py
│ │ ├── queue.py
│ │ └── review.py
│ ├── merge_proposals.py
│ ├── openid.py
│ ├── pkg.py
│ ├── pubsub.py
│ ├── setup.py
│ ├── simple.py
│ ├── templates
│ │ ├── about.html
│ │ ├── codeblock.html
│ │ ├── credentials.html
│ │ ├── cupboard
│ │ │ ├── broken-merge-proposals.html
│ │ │ ├── changeset-list.html
│ │ │ ├── changeset.html
│ │ │ ├── default-evaluate.html
│ │ │ ├── done-list.html
│ │ │ ├── failure-stage-index.html
│ │ │ ├── history.html
│ │ │ ├── merge-proposal.html
│ │ │ ├── merge-proposals.html
│ │ │ ├── never-processed.html
│ │ │ ├── publish-history.html
│ │ │ ├── publish.html
│ │ │ ├── queue.html
│ │ │ ├── ready-list.html
│ │ │ ├── rejected.html
│ │ │ ├── reprocess-logs.html
│ │ │ ├── result-code-index.html
│ │ │ ├── result-code.html
│ │ │ ├── review-done.html
│ │ │ ├── review.html
│ │ │ ├── run.html
│ │ │ ├── sidebar.html
│ │ │ ├── start.html
│ │ │ ├── util.html
│ │ │ └── workers.html
│ │ ├── faq-api.html
│ │ ├── faq-auto-push.html
│ │ ├── faq-incorrect.html
│ │ ├── faq-out-of-date-proposal.html
│ │ ├── faq-supported-vcs.html
│ │ ├── footer.html
│ │ ├── generic
│ │ │ ├── candidates.html
│ │ │ ├── codebase.html
│ │ │ ├── done.html
│ │ │ ├── sidebar.html
│ │ │ ├── start.html
│ │ │ └── summary.html
│ │ ├── index.html
│ │ ├── inputs.html
│ │ ├── layout.html
│ │ ├── lintian_util.html
│ │ ├── log-index.html
│ │ ├── login.html
│ │ ├── merge-proposal.html
│ │ ├── merge-proposals.html
│ │ ├── ready-list.html
│ │ ├── repo-list.html
│ │ ├── result-codes
│ │ │ ├── 401-unauthorized.html
│ │ │ ├── 502-bad-gateway.html
│ │ │ ├── autopkgtest-missing-node-module.html
│ │ │ ├── before-quilt-error.html
│ │ │ ├── branch-unavailable.html
│ │ │ ├── build-command-missing.html
│ │ │ ├── build-debhelper-pattern-not-found.html
│ │ │ ├── build-dh-addon-load-failure.html
│ │ │ ├── build-failed-stage-build.html
│ │ │ ├── build-missing-go-package.html
│ │ │ ├── build-missing-php-class.html
│ │ │ ├── build-missing-python-module.html
│ │ │ ├── build-upstart-file-present.html
│ │ │ ├── codemod-command-failed.html
│ │ │ ├── command-failed.html
│ │ │ ├── control-file-is-generated.html
│ │ │ ├── control-files-in-root.html
│ │ │ ├── dist-apt-broken-packages.html
│ │ │ ├── dist-command-failed.html
│ │ │ ├── dist-missing-automake-input.html
│ │ │ ├── dist-missing-file.html
│ │ │ ├── install-deps-unsatisfied-dependencies.html
│ │ │ ├── invalid-path-normalization.html
│ │ │ ├── invalid-upstream-version-format.html
│ │ │ ├── missing-control-file.html
│ │ │ ├── native-package.html
│ │ │ ├── new-upstream-missing.html
│ │ │ ├── no-upstream-locations-known.html
│ │ │ ├── package-in-subpath.html
│ │ │ ├── previous-upstream-missing.html
│ │ │ ├── quilt-refresh-error.html
│ │ │ ├── roundtripping-error.html
│ │ │ ├── run-disappeared.html
│ │ │ ├── timeout.html
│ │ │ ├── unpack-unexpected-local-upstream-changes.html
│ │ │ ├── unparseable-changelog.html
│ │ │ ├── unsupported-vcs-protocol.html
│ │ │ ├── upstream-branch-unavailable.html
│ │ │ ├── upstream-branch-unknown.html
│ │ │ ├── upstream-merged-conflicts.html
│ │ │ ├── upstream-unsupported-vcs-hg.html
│ │ │ ├── upstream-unsupported-vcs-svn.html
│ │ │ ├── upstream-unsupported-vcs.html
│ │ │ ├── upstream-version-missing-in-upstream-branch.html
│ │ │ ├── uscan-error.html
│ │ │ ├── watch-syntax-error.html
│ │ │ ├── worker-failure.html
│ │ │ └── worker-timeout.html
│ │ ├── review_util.html
│ │ ├── run_util.html
│ │ └── webhook.html
│ └── webhook.py
│ ├── state.py
│ ├── state.sql
│ ├── vcs.py
│ └── worker_creds.py
├── pyproject.toml
├── reprocess-build-results.py
├── reschedule.py
├── run_worker.sh
├── runner-py
├── Cargo.toml
└── src
│ └── lib.rs
├── runner
├── Cargo.toml
└── src
│ ├── backchannel.rs
│ ├── config_generator.rs
│ ├── lib.rs
│ ├── main.rs
│ └── web.rs
├── sbuildrc.example
├── setup.py
├── sieve
├── README
└── janitor.sieve
├── site-py
├── Cargo.toml
└── src
│ └── lib.rs
├── site
├── Cargo.toml
└── src
│ ├── analyze.rs
│ └── lib.rs
├── src
├── analyze_log.rs
├── api
│ ├── mod.rs
│ ├── runner.rs
│ └── worker.rs
├── artifacts
│ ├── gcs.rs
│ ├── local.rs
│ └── mod.rs
├── bin
│ └── janitor-schedule.rs
├── config.rs
├── debdiff.rs
├── lib.rs
├── logging.rs
├── logs
│ ├── filesystem.rs
│ ├── gcs.rs
│ └── mod.rs
├── prometheus.rs
├── publish.rs
├── queue.rs
├── reprocess_logs.rs
├── schedule.rs
├── state.rs
└── vcs.rs
├── tests
├── __init__.py
├── conftest.py
├── test_archive.py
├── test_artifacts.py
├── test_bzr_store.py
├── test_config.py
├── test_core.py
├── test_cupboard.py
├── test_debdiff.py
├── test_debian.py
├── test_differ.py
├── test_git_store.py
├── test_launchpad.py
├── test_logs.py
├── test_queue.py
├── test_runner.py
├── test_site.py
├── test_site_macros.py
├── test_site_simple.py
└── test_vcs.py
├── tox.ini
└── worker
├── Cargo.toml
├── src
├── bin
│ ├── debian-build.rs
│ ├── dist.rs
│ ├── generic-build.rs
│ └── worker.rs
├── client.rs
├── debian
│ ├── build.rs
│ ├── lintian.rs
│ └── mod.rs
├── generic
│ └── mod.rs
├── lib.rs
├── tee.rs
├── vcs.rs
└── web.rs
└── templates
├── artifact_index.html
├── index.html
└── log_index.html
/.codespell-ignore-words:
--------------------------------------------------------------------------------
1 | crate
2 | buildd
3 | fpr
4 | afile
5 | nd
6 | ser
7 |
--------------------------------------------------------------------------------
/.codespellrc:
--------------------------------------------------------------------------------
1 | [codespell]
2 | skip = .git,.mypy_cache,build,testdata,target,htmlcov,Cargo.lock
3 | ignore-words = .codespell-ignore-words
4 |
--------------------------------------------------------------------------------
/.containerignore:
--------------------------------------------------------------------------------
1 | .eggs
2 | .pytest_cache
3 | .mypy_cache
4 | __pycache__
5 | lib
6 | lib64/
7 | share/
8 | bin/
9 | man/
10 | *~
11 | .git/
12 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | credentials.*
2 | *.secret
3 | .mypy_cache
4 | .pytest_cache
5 | .ruff_cache
6 | .git
7 | .bzr
8 | .eggs
9 | *~
10 | target/
11 | k8s
12 | *.conf
13 | bin/
14 | htmlcov/
15 | tests/
16 | build/
17 | lib/
18 | include/
19 | .venv
20 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @jelmer
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ---
2 | github: jelmer
3 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # Please see the documentation for all configuration options:
2 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
3 | ---
4 | version: 2
5 | updates:
6 | - package-ecosystem: "cargo"
7 | directory: "/"
8 | schedule:
9 | interval: "weekly"
10 | - package-ecosystem: "github-actions"
11 | directory: "/"
12 | schedule:
13 | interval: weekly
14 | - package-ecosystem: "pip"
15 | directory: "/"
16 | schedule:
17 | interval: "weekly"
18 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Linting
3 |
4 | "on":
5 | push:
6 | pull_request:
7 | schedule:
8 | - cron: '0 6 * * *' # Daily 6AM UTC build
9 |
10 | jobs:
11 | yamllint:
12 | name: YAML Lint
13 | runs-on: ubuntu-latest
14 |
15 | # Steps to perform in job
16 | steps:
17 | - name: Checkout code
18 | uses: actions/checkout@v4
19 |
20 | - name: YAML style checks (yamllint)
21 | if: always()
22 | run: |
23 | set -x
24 | pip3 install --break-system-packages --upgrade \
25 | yamllint
26 | make yamllint
27 |
28 | djlint:
29 | name: HTML Lint
30 | runs-on: ubuntu-latest
31 |
32 | # Steps to perform in job
33 | steps:
34 | - name: Checkout code
35 | uses: actions/checkout@v4
36 |
37 | - name: HTML style checks (djLint)
38 | if: always()
39 | run: |
40 | set -x
41 | pip3 install --break-system-packages --upgrade \
42 | djlint
43 | make djlint
44 |
45 | codespell:
46 | name: Check common misspellings
47 | runs-on: ubuntu-latest
48 |
49 | # Steps to perform in job
50 | steps:
51 | - name: Checkout code
52 | uses: actions/checkout@v4
53 |
54 | - name: Check common misspellings (codespell)
55 | if: always()
56 | run: |
57 | set -x
58 | pip3 install --break-system-packages --upgrade \
59 | codespell
60 | codespell
61 |
--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Rust build
3 |
4 | "on":
5 | push:
6 | pull_request:
7 |
8 | env:
9 | CARGO_TERM_COLOR: always
10 |
11 | jobs:
12 | rust-build:
13 |
14 | runs-on: ${{ matrix.os }}
15 | strategy:
16 | matrix:
17 | os: [ubuntu-latest]
18 | fail-fast: false
19 |
20 | steps:
21 | - uses: actions/checkout@v4
22 | - name: Install dependencies
23 | run: |
24 | sudo apt -y update
25 | sudo apt -y install devscripts libapt-pkg-dev libtdb-dev libssl-dev \
26 | pkg-config libgpgme-dev protobuf-compiler diffoscope
27 | - name: Upgrade pip
28 | run: python -m pip install --upgrade pip setuptools_rust setuptools
29 | - name: Install breezy, diffoscope
30 | run: python -m pip install --upgrade breezy diffoscope jsondiff \
31 | "brz-debian@git+https://github.com/breezy-team/breezy-debian"
32 | # TODO(jelmer): Add proper test isolation so this isn't necessary
33 | - name: Setup bzr identity
34 | run: brz whoami "CI "
35 | - name: Build
36 | run: cargo build --verbose --workspace
37 | - name: Run tests
38 | run: cargo test --verbose --workspace
39 |
40 | rust-fmt:
41 | runs-on: ubuntu-latest
42 | steps:
43 | - uses: actions/checkout@v4
44 | - name: Install rustfmt
45 | run: sudo apt -y install rustfmt cargo
46 | - name: Check formatting
47 | run: cargo fmt --all -- --check
48 |
--------------------------------------------------------------------------------
/.github/workflows/sql.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: SQL - Check database
3 |
4 | "on":
5 | push:
6 | branches:
7 | - main
8 | pull_request:
9 | schedule:
10 | - cron: '0 6 * * *' # Daily 6AM UTC build
11 |
12 | jobs:
13 | build:
14 | name: Test SQL
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - name: Checkout code
19 | uses: actions/checkout@v4
20 |
21 | - name: Install dependencies
22 | run: |
23 | set -x
24 | sudo apt-get update --yes
25 | PSQL_DEB=$( apt-cache search 'postgresql-.*-debversion' \
26 | | awk '{print $1}' \
27 | | tail -n 1 )
28 | sudo apt-get satisfy --yes --no-install-recommends \
29 | postgresql \
30 | ${PSQL_DEB} \
31 | postgresql-common
32 |
33 | - name: Load SQL
34 | run: |
35 | set -x
36 | set -o pipefail
37 | cat py/janitor/state.sql py/janitor/debian/debian.sql \
38 | | pg_virtualenv psql -v ON_ERROR_STOP=1
39 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .coverage
2 | *.swp
3 | build/
4 | __pycache__
5 | .plugins
6 | .pybuild
7 | _build/*
8 | *.pyc
9 | *.rej
10 | *.orig
11 | *~
12 | site/_build
13 | state.db
14 | site/history.rst
15 | build.log
16 | debian_janitor.egg-info/
17 | .mypy_cache/
18 | .venv
19 | .eggs
20 | janitor.egg-info
21 | pyvenv.cfg
22 | lib
23 | lib64
24 | /bin
25 | man
26 | share
27 | .hypothesis
28 | .tox
29 | Cargo.lock
30 | target
31 | .testrepository
32 | py/janitor/_*.cpython-*.so
33 |
--------------------------------------------------------------------------------
/.testr.conf:
--------------------------------------------------------------------------------
1 | [DEFAULT]
2 | test_command=PYTHONPATH=`pwd`:$PYTHONPATH pytest --subunit tests
3 | test_id_option=--load-list $IDFILE
4 | test_list_option=--list
5 |
--------------------------------------------------------------------------------
/.yamllint:
--------------------------------------------------------------------------------
1 | ---
2 | extends: default
3 |
4 | rules:
5 | # 80 chars should be enough, but don't fail if a line is longer
6 | line-length:
7 | max: 80
8 | level: warning
9 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | Jelmer Vernooij
2 |
3 | There are a lot of people who contributed ideas and feedback to the Janitor.
4 | Some of them are listed here; if you're missing, please let me know!
5 |
6 | Thanks:
7 |
8 | Perry Lorrier
9 | Christoph Berg
10 | Raphael Hertzog
11 | Gregor Herrman
12 | Holger Levsen
13 | Helmut Grohne
14 | Mattia Rizzolo
15 | Colin Watson
16 |
--------------------------------------------------------------------------------
/Dockerfile_archive:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | dpkg-dev \
23 | git \
24 | libapt-pkg-dev \
25 | python3-gpg \
26 | python3-pip \
27 | && apt-get clean
28 |
29 | COPY . /code
30 |
31 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,archive]" \
32 | && rm -rf /code
33 |
34 | EXPOSE 9914
35 |
36 | ENTRYPOINT ["janitor-archive", "--port=9914", "--listen-address=0.0.0.0"]
37 |
--------------------------------------------------------------------------------
/Dockerfile_auto_upload:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | python3-pip \
23 | && apt-get clean
24 |
25 | COPY . /code
26 |
27 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,auto-upload]" \
28 | && rm -rf /code
29 |
30 | EXPOSE 9933
31 |
32 | ENTRYPOINT ["janitor-auto-upload", "--port=9933", "--listen-address=0.0.0.0"]
33 |
--------------------------------------------------------------------------------
/Dockerfile_bzr_store:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | python3-gpg \
23 | python3-pip \
24 | && apt-get clean
25 |
26 | COPY . /code
27 |
28 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,bzr-store]" \
29 | && rm -rf /code
30 |
31 | VOLUME /bzr
32 |
33 | EXPOSE 9929
34 |
35 | EXPOSE 9930
36 |
37 | ENTRYPOINT ["janitor-bzr-store", "--port=9929", "--public-port=9930", "--listen-address=0.0.0.0"]
38 |
--------------------------------------------------------------------------------
/Dockerfile_differ:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | python3-gpg \
23 | python3-pip \
24 | && apt-get clean
25 |
26 | COPY . /code
27 |
28 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,differ]" \
29 | && rm -rf /code
30 |
31 | EXPOSE 9920
32 |
33 | ENTRYPOINT ["janitor-differ", "--port=9920", "--listen-address=0.0.0.0"]
34 |
--------------------------------------------------------------------------------
/Dockerfile_git_store:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | git \
23 | python3-gpg \
24 | python3-pip \
25 | && apt-get clean
26 |
27 | COPY . /code
28 |
29 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,git-store]" \
30 | && rm -rf /code
31 |
32 | VOLUME /git
33 |
34 | EXPOSE 9923
35 |
36 | EXPOSE 9924
37 |
38 | ENTRYPOINT ["janitor-git-store", "--port=9923", "--public-port=9924", "--listen-address=0.0.0.0"]
39 |
--------------------------------------------------------------------------------
/Dockerfile_mail_filter:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ca-certificates \
14 | cargo \
15 | libpython3-dev \
16 | libssl-dev \
17 | pkg-config \
18 | protobuf-compiler \
19 | python3-minimal \
20 | && apt-get clean
21 |
22 | COPY . /code
23 |
24 | RUN cargo build --release --manifest-path /code/mail-filter/Cargo.toml
25 |
26 | FROM docker.io/debian:testing-slim
27 |
28 | COPY --from=build /code/target/release/janitor-mail-filter /usr/local/bin/janitor-mail-filter
29 |
30 | ENTRYPOINT ["janitor-mail-filter"]
31 |
--------------------------------------------------------------------------------
/Dockerfile_ognibuild_dep:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS m4
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get satisfy --yes --no-install-recommends \
12 | apt-file \
13 | aptitude \
14 | && apt-get clean \
15 | && apt-file update \
16 | && apt-file search /usr/share/aclocal/.*.m4 --regex -l | xargs aptitude -y install
17 |
18 |
19 |
20 | # https://hub.docker.com/_/debian
21 | FROM docker.io/debian:testing-slim AS build
22 | MAINTAINER Jelmer Vernooij
23 |
24 | ARG DEBIAN_FRONTEND=noninteractive
25 |
26 | RUN apt-get update --yes \
27 | && apt-get install --yes --no-install-recommends \
28 | auto-apt-proxy \
29 | iproute2 \
30 | && apt-get satisfy --yes --no-install-recommends \
31 | ca-certificates \
32 | cargo \
33 | gcc \
34 | git \
35 | libc6-dev \
36 | libpython3-dev \
37 | libssl-dev \
38 | pkg-config \
39 | python3-minimal \
40 | && apt-get clean \
41 | && git clone https://github.com/jelmer/ognibuild.git /code/ \
42 | && cd /code/ \
43 | && cargo build --verbose --release #-p dep-server
44 |
45 |
46 |
47 | # https://hub.docker.com/_/debian
48 | FROM docker.io/debian:testing-slim
49 | MAINTAINER Jelmer Vernooij
50 |
51 | ARG DEBIAN_FRONTEND=noninteractive
52 |
53 | RUN apt-get update --yes \
54 | && apt-get install --yes --no-install-recommends \
55 | auto-apt-proxy \
56 | iproute2 \
57 | && apt-get upgrade --yes \
58 | && apt-get satisfy --yes --no-install-recommends \
59 | libpython3-dev \
60 | python3-breezy \
61 | && apt-get clean \
62 | && rm -rf /usr/share/aclocal
63 |
64 | COPY --from=m4 /usr/share/aclocal /usr/share/aclocal
65 |
66 | COPY --from=build /code/target/release/ /usr/local/bin/
67 |
68 | EXPOSE 9934
69 |
70 | # $ janitor-ognibuild
71 | ENTRYPOINT ["dep-server", "--port=9934", "--listen-address=0.0.0.0"]
72 |
--------------------------------------------------------------------------------
/Dockerfile_publish:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | python3-gpg \
23 | python3-pip \
24 | && apt-get clean
25 |
26 | COPY . /code
27 |
28 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,publish]" \
29 | && rm -rf /code
30 |
31 | EXPOSE 9912
32 |
33 | ENTRYPOINT ["janitor-publish", "--port=9912", "--listen-address=0.0.0.0"]
34 |
--------------------------------------------------------------------------------
/Dockerfile_runner:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get upgrade --yes \
12 | && apt-get satisfy --yes --no-install-recommends \
13 | ## Standard packages: ./CONTRIBUTING.md
14 | cargo \
15 | g++ \
16 | gcc \
17 | libpython3-dev \
18 | libssl-dev \
19 | pkg-config \
20 | protobuf-compiler \
21 | ## Extra packages
22 | dpkg-dev \
23 | git \
24 | libapt-pkg-dev \
25 | python3-gpg \
26 | python3-pip \
27 | && apt-get clean
28 |
29 | COPY . /code
30 |
31 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,runner]" \
32 | && rm -rf /code
33 |
34 | EXPOSE 9911
35 |
36 | EXPOSE 9919
37 |
38 | ENTRYPOINT ["janitor-runner", "--port=9911", "--public-port=9919", "--listen-address=0.0.0.0"]
39 |
--------------------------------------------------------------------------------
/Dockerfile_site:
--------------------------------------------------------------------------------
1 | # TODO: config
2 | # TODO: service discovery
3 |
4 | # https://hub.docker.com/_/debian
5 | FROM docker.io/debian:testing-slim AS build
6 | MAINTAINER Jelmer Vernooij
7 |
8 | ARG DEBIAN_FRONTEND=noninteractive
9 |
10 | RUN apt-get update --yes \
11 | && apt-get install --yes --no-install-recommends \
12 | auto-apt-proxy \
13 | iproute2 \
14 | && apt-get upgrade --yes \
15 | && apt-get satisfy --yes --no-install-recommends \
16 | ## Standard packages: ./CONTRIBUTING.md
17 | cargo \
18 | g++ \
19 | gcc \
20 | libpython3-dev \
21 | libssl-dev \
22 | pkg-config \
23 | protobuf-compiler \
24 | ## Extra packages
25 | libjs-jquery-datatables \
26 | python3-gpg \
27 | python3-pip \
28 | && apt-get clean
29 |
30 | COPY . /code
31 |
32 | RUN pip3 install --break-system-packages --upgrade "/code[gcp,site]" \
33 | && rm -rf /code
34 |
35 | EXPOSE 8080
36 |
37 | EXPOSE 8090
38 |
39 | ENTRYPOINT ["janitor-site", "--port=8080", "--public-port=8090", "--host=0.0.0.0"]
40 |
--------------------------------------------------------------------------------
/Dockerfile_worker:
--------------------------------------------------------------------------------
1 | # https://hub.docker.com/_/debian
2 | FROM docker.io/debian:testing-slim AS build
3 | MAINTAINER Jelmer Vernooij
4 |
5 | ARG DEBIAN_FRONTEND=noninteractive
6 |
7 | RUN apt-get update --yes \
8 | && apt-get install --yes --no-install-recommends \
9 | auto-apt-proxy \
10 | iproute2 \
11 | && apt-get satisfy --yes --no-install-recommends \
12 | ca-certificates \
13 | cargo \
14 | libpython3-dev \
15 | libssl-dev \
16 | pkg-config \
17 | protobuf-compiler \
18 | python3-minimal \
19 | && apt-get clean
20 |
21 | COPY . /code
22 |
23 | RUN cargo build --verbose --release --manifest-path /code/worker/Cargo.toml
24 |
25 |
26 |
27 | # https://hub.docker.com/_/debian
28 | FROM docker.io/debian:testing-slim
29 | MAINTAINER Jelmer Vernooij
30 |
31 | ARG DEBIAN_FRONTEND=noninteractive
32 |
33 | RUN apt-get update --yes \
34 | && apt-get install --yes --no-install-recommends \
35 | auto-apt-proxy \
36 | iproute2 \
37 | && apt-get upgrade --yes \
38 | && apt-get satisfy --yes --no-install-recommends \
39 | libpython3-dev \
40 | python3-breezy \
41 | dpkg-dev \
42 | && apt-get clean
43 |
44 | COPY --from=build /code/target/release/janitor-worker /usr/local/bin/janitor-worker
45 |
46 | COPY autopkgtest-wrapper /usr/local/bin/autopkgtest-wrapper
47 |
48 | ENV AUTOPKGTEST=/usr/local/bin/autopkgtest-wrapper
49 |
50 | EXPOSE 9821
51 |
52 | ENTRYPOINT ["janitor-worker", "--port=9821", "--listen-address=0.0.0.0"]
53 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | DOCKER_TAG ?= latest
2 | PYTHON ?= python3
3 | SHA = $(shell git rev-parse HEAD)
4 | DOCKERFILES = $(shell ls Dockerfile_* | sed 's/Dockerfile_//' )
5 | DOCKER_TARGETS := $(patsubst %,docker-%,$(DOCKERFILES))
6 | BUILD_TARGETS := $(patsubst %,build-%,$(DOCKERFILES))
7 | PUSH_TARGETS := $(patsubst %,push-%,$(DOCKERFILES))
8 |
9 | .PHONY: all check
10 |
11 | build-inplace:
12 | $(PYTHON) setup.py build_ext -i
13 |
14 | all: core
15 |
16 | core: py/janitor/site/_static/pygments.css build-inplace
17 |
18 | check:: typing
19 |
20 | check:: test
21 |
22 | check:: style
23 |
24 | check:: ruff
25 |
26 | check:: check-format
27 |
28 | check-format:: check-ruff-format
29 |
30 | check-ruff-format:
31 | ruff format --check py tests
32 |
33 | check-format:: check-cargo-format
34 |
35 | check-cargo-format:
36 | cargo fmt --check --all
37 |
38 | ruff:
39 | ruff check py tests
40 |
41 | fix:: ruff-fix
42 |
43 | fix:: clippy-fix
44 |
45 | fix:: reformat
46 |
47 | clippy-fix:
48 | cargo clippy --fix --allow-dirty --allow-staged
49 |
50 | ruff-fix:
51 | ruff check --fix .
52 |
53 | reformat-ruff:
54 | ruff format py tests
55 |
56 | reformat:: reformat-ruff
57 |
58 | reformat::
59 | cargo fmt --all
60 |
61 | suite-references:
62 | git grep "\\(lintian-brush\|lintian-fixes\|debianize\|fresh-releases\|fresh-snapshots\\)" | grep -v .example
63 |
64 | test:: build-inplace
65 | PYTHONPATH=$(shell pwd)/py:$(PYTHONPATH) PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python $(PYTHON) -m pytest -vv tests
66 |
67 | test::
68 | cargo test
69 |
70 | style:: yamllint
71 |
72 | yamllint:
73 | yamllint -s .github/
74 |
75 | style:: djlint
76 |
77 | check-format:: check-html-format
78 |
79 | check-html-format:
80 | djlint --check py/janitor/site/templates/
81 |
82 | djlint:
83 | djlint py/janitor/site/templates
84 |
85 | typing:
86 | $(PYTHON) -m mypy py/janitor tests
87 |
88 | py/janitor/site/_static/pygments.css:
89 | pygmentize -S default -f html > $@
90 |
91 | clean:
92 |
93 | docker-%:
94 | $(MAKE) build-$*
95 | $(MAKE) push-$*
96 |
97 | build-%:
98 | buildah build --no-cache -t ghcr.io/jelmer/janitor/$*:$(DOCKER_TAG) -t ghcr.io/jelmer/janitor/$*:$(SHA) -f Dockerfile_$* .
99 |
100 | push-%:
101 | buildah push ghcr.io/jelmer/janitor/$*:$(DOCKER_TAG)
102 | buildah push ghcr.io/jelmer/janitor/$*:$(SHA)
103 |
104 | docker-all: $(DOCKER_TARGETS)
105 |
106 | build-all: $(BUILD_TARGETS)
107 |
108 | push-all: $(PUSH_TARGETS)
109 |
110 | reformat:: reformat-html
111 |
112 | reformat-html:
113 | djlint --reformat py/janitor/site/templates/
114 |
115 | codespell:
116 | codespell
117 |
--------------------------------------------------------------------------------
/TODO:
--------------------------------------------------------------------------------
1 | - Split out generic code from debian-janitor specific bits
2 |
--------------------------------------------------------------------------------
/TODO.rust:
--------------------------------------------------------------------------------
1 | * Convert queue to rust
2 | * Convert queue_processor to rust
3 | * Convert publish_one to rust
4 | * Convert LogFileManager to rust
5 | * Convert ArtifactManager to rust
6 |
--------------------------------------------------------------------------------
/archive/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-archive"
3 | version.workspace = true
4 | edition.workspace = true
5 |
6 | [dependencies]
7 | futures = "0.3.31"
8 | tokio = { workspace = true, features = ["full"] }
9 | tracing = "0.1.41"
10 | deb822-lossless.workspace = true
11 | debian-control.workspace = true
12 |
--------------------------------------------------------------------------------
/archive/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Archive crate for the Janitor project.
2 | //!
3 | //! This crate provides functionality for working with package archives.
4 |
5 | #![deny(missing_docs)]
6 |
7 | use tracing::{debug, error, info};
8 |
9 | /// Temporary prefix used for archive operations.
10 | pub const TMP_PREFIX: &str = "janitor-apt";
11 | /// Default timeout for Google Cloud Storage operations in seconds.
12 | pub const DEFAULT_GCS_TIMEOUT: usize = 60 * 30;
13 |
14 | /// Scanner module for archive operations.
15 | pub mod scanner;
16 |
17 | // TODO(jelmer): Generate contents file
18 |
--------------------------------------------------------------------------------
/archive/tests/data/hello_2.10-3.debian.tar.xz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jelmer/janitor/358562478447842c256beddffd1227722b6d7ad1/archive/tests/data/hello_2.10-3.debian.tar.xz
--------------------------------------------------------------------------------
/archive/tests/data/hello_2.10-3.dsc:
--------------------------------------------------------------------------------
1 | -----BEGIN PGP SIGNED MESSAGE-----
2 | Hash: SHA256
3 |
4 | Format: 3.0 (quilt)
5 | Source: hello
6 | Binary: hello
7 | Architecture: any
8 | Version: 2.10-3
9 | Maintainer: Santiago Vila <sanvila@debian.org>
10 | Homepage: https://www.gnu.org/software/hello/
11 | Standards-Version: 4.6.2
12 | Vcs-Browser: https://salsa.debian.org/sanvila/hello
13 | Vcs-Git: https://salsa.debian.org/sanvila/hello.git
14 | Testsuite: autopkgtest
15 | Build-Depends: debhelper-compat (= 13), help2man, texinfo
16 | Package-List:
17 | hello deb devel optional arch=any
18 | Checksums-Sha1:
19 | f7bebf6f9c62a2295e889f66e05ce9bfaed9ace3 725946 hello_2.10.orig.tar.gz
20 | 9dc7a584db576910856ac7aa5cffbaeefe9cf427 819 hello_2.10.orig.tar.gz.asc
21 | a2d122fd090dbab3d40b219a237fbb7d74f8023a 12684 hello_2.10-3.debian.tar.xz
22 | Checksums-Sha256:
23 | 31e066137a962676e89f69d1b65382de95a7ef7d914b8cb956f41ea72e0f516b 725946 hello_2.10.orig.tar.gz
24 | 4ea69de913428a4034d30dcdcb34ab84f5c4a76acf9040f3091f0d3fac411b60 819 hello_2.10.orig.tar.gz.asc
25 | 60ee7a466808301fbaa7fea2490b5e7a6d86f598956fb3e79c71b3295dc1f249 12684 hello_2.10-3.debian.tar.xz
26 | Files:
27 | 6cd0ffea3884a4e79330338dcc2987d6 725946 hello_2.10.orig.tar.gz
28 | e6074bb23a0f184e00fdfb5c546b3bc2 819 hello_2.10.orig.tar.gz.asc
29 | 27ab798c1d8d9048ffc8127e9b8dbfca 12684 hello_2.10-3.debian.tar.xz
30 |
31 | -----BEGIN PGP SIGNATURE-----
32 |
33 | iQEzBAEBCAAdFiEE1Uw7+v+wQt44LaXXQc5/C58bizIFAmOp5ssACgkQQc5/C58b
34 | izJEQwgAiB73GKmfMV5PPyysZhoruCJo5I3/egZXfzA4lKofsqDh/ItYdvev5Ijl
35 | Rrbmd6xOXxJcHSYIeeuMYwK3X0moOY1Qk3CqTO00yH9lHgZg1r7G0xqhTM0EX7Jz
36 | suUpdRX7liENsAdwHBAmW3F6Lh4MFmBYJ2UcaX/HN6XwEF19b+JRvN76kUH9kaUJ
37 | 037mwN/M9qfwaLvp3Dpm+p7G4CGLLEzIEJCwx8IPew74+YhsXknGuK/96VFngdwK
38 | LFH3j/Kutc952HdLkKOWCibYd1fklW7SMXmJfiYF0qN4LDmhVMdkfd9ToAGxPhwn
39 | pMBP096Z2WoW+Z7QVgzwG+MjbyPz2w==
40 | =kNoz
41 | -----END PGP SIGNATURE-----
42 |
--------------------------------------------------------------------------------
/archive/tests/data/hello_2.10-3_amd64.deb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jelmer/janitor/358562478447842c256beddffd1227722b6d7ad1/archive/tests/data/hello_2.10-3_amd64.deb
--------------------------------------------------------------------------------
/archive/tests/data/hello_2.10.orig.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jelmer/janitor/358562478447842c256beddffd1227722b6d7ad1/archive/tests/data/hello_2.10.orig.tar.gz
--------------------------------------------------------------------------------
/archive/tests/data/hello_2.10.orig.tar.gz.asc:
--------------------------------------------------------------------------------
1 | -----BEGIN PGP SIGNATURE-----
2 | Version: GnuPG v2
3 |
4 | iQIcBAABCAAGBQJUaJPFAAoJEKlVMkX96bc5EuMP/2Z8T2r+ZjJbveEfVKuY3LbG
5 | sPqZqI/t0WISsfhRen3R/tiis0lN8TWIdTFRnLqtxyqfzDDtgzrPg8gwICFYxE6W
6 | ffVvgbbDA14EuatWHfnAK1SjWsaemJIO1rGROgNFqsatEDIOf7bg4NZ6Gs1QR0rJ
7 | p4W+LYKiP8UeJwV4Xd2d/h+rf4XBWo5HTNYwgZpZawklWupmIx0bXi3HiRs4MJQm
8 | mfbNrNE5YcAWQpBwAxgcUCwGHlvDonpvu0i0D4tNoMeneLAhZty1GCnamTlcuDXJ
9 | IZpg0Ky9mYEnyRhaRnLsyaZ2kzJhOSMNfVzSP2+ge+JfTuenw1yvhAZC9qTLoV+f
10 | 1xUhxUkmzgDV3pVpc9qB+LVGfJclrHtrgD2dakmph5JGGhAoAExwrkyO3qxE5jzJ
11 | x2C83aNpBjNqPhAVIcywXpFWBT8sbsXgwLufXWFwQtyxIm1dxrOku0SI5oYm1ZON
12 | l1rjkaQmpFKx1oo7eOG1XLbCQ1Ii1qEDSiXTvQwoaBTkAcPz1KOVGEyby6kf9AS9
13 | DjuJzh8oQgylaDk5FqGqsY6S90Naz7SJSrBi/3xOP51LvsLciNx+EPBNrRJzFDYw
14 | svn/ahaWx4hsXr0ErjqAzqE6ZNQQcyKa5qDDFD5dz14dSI78FjZ4u2WUwZCGyW+u
15 | OFOwPF0lXuPO6q2UFcpj
16 | =s6wC
17 | -----END PGP SIGNATURE-----
18 |
--------------------------------------------------------------------------------
/auto-upload/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-auto-upload"
3 | version.workspace = true
4 | edition.workspace = true
5 |
6 | [dependencies]
7 | futures = "0.3.31"
8 | tokio = { workspace = true, features = ["full"] }
9 | tracing = "0.1.41"
10 | silver-platter = { workspace = true, features = ["debian"] }
11 |
--------------------------------------------------------------------------------
/auto-upload/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Auto-upload crate for the Janitor project.
2 | //!
3 | //! This crate provides functionality for automatically uploading Debian packages.
4 |
5 | #![deny(missing_docs)]
6 |
7 | /// Re-export for signing Debian packages
8 | pub use silver_platter::debian::uploader::debsign;
9 |
10 | /// Re-export for uploading Debian changes files
11 | pub use silver_platter::debian::uploader::dput_changes;
12 |
--------------------------------------------------------------------------------
/autopkgtest-wrapper:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | autopkgtest "$@"
3 | # autopkgtest(1) mentions that 2 indicates a skipped test. Ignore those:
4 | aptexit=$(($?&~2))
5 | echo "Exiting with $aptexit"
6 | exit $aptexit
7 |
--------------------------------------------------------------------------------
/build.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 |
3 | fn main() {
4 | let top_dir = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap())
5 | .canonicalize()
6 | .unwrap();
7 |
8 | protobuf_codegen::Codegen::new()
9 | .cargo_out_dir("generated")
10 | .inputs([top_dir.join("py/janitor/config.proto")])
11 | .include(top_dir)
12 | .run_from_script();
13 | }
14 |
--------------------------------------------------------------------------------
/bzr-store/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "bzr-store"
3 | version = "0.1.0"
4 | edition.workspace = true
5 |
6 | [lib]
7 |
8 | [dependencies]
9 |
--------------------------------------------------------------------------------
/bzr-store/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Bzr Store crate for the Janitor project.
2 | //!
3 | //! This crate provides functionality for storing and managing Bazaar repositories.
4 |
5 | #![deny(missing_docs)]
6 |
--------------------------------------------------------------------------------
/common-py/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "common-py"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | publish = false
6 | edition.workspace = true
7 | description = "Common bindings for the janitor - python"
8 | license = "GPL-3.0+"
9 | repository = "https://github.com/jelmer/janitor.git"
10 | homepage = "https://github.com/jelmer/janitor"
11 |
12 | [lib]
13 | crate-type = ["cdylib"]
14 |
15 | [dependencies]
16 | pyo3 = {workspace = true, features=["serde", "chrono"]}
17 | janitor = { path = ".." }
18 | reqwest = { version = "0.12", features = ["json"] }
19 | serde_json = "1"
20 | pyo3-log = { workspace = true }
21 | log = "0.4"
22 | chrono = { workspace = true, features = ["serde"] }
23 | breezyshim.workspace = true
24 | silver-platter = { workspace = true, features = ["debian", "pyo3"] }
25 | url.workspace = true
26 | pyo3-async-runtimes = { workspace = true, features = ["tokio-runtime"] }
27 | tokio.workspace = true
28 | maplit.workspace = true
29 | pyo3-filelike.workspace = true
30 |
31 | [features]
32 | extension-module = ["pyo3/extension-module"]
33 |
--------------------------------------------------------------------------------
/common-py/src/debdiff.rs:
--------------------------------------------------------------------------------
1 | use pyo3::create_exception;
2 | use pyo3::prelude::*;
3 | use pyo3::types::PyBytes;
4 |
5 | #[pyfunction]
6 | fn debdiff_is_empty(debdiff: &str) -> PyResult<bool> {
7 | Ok(janitor::debdiff::debdiff_is_empty(debdiff))
8 | }
9 |
10 | #[pyfunction]
11 | fn filter_boring(debdiff: &str, old_version: &str, new_version: &str) -> PyResult<String> {
12 | Ok(janitor::debdiff::filter_boring(
13 | debdiff,
14 | old_version,
15 | new_version,
16 | ))
17 | }
18 |
19 | #[pyfunction]
20 | fn section_is_wdiff(title: &str) -> PyResult<(bool, Option<&str>)> {
21 | Ok(janitor::debdiff::section_is_wdiff(title))
22 | }
23 |
24 | #[pyfunction]
25 | fn markdownify_debdiff(debdiff: &str) -> PyResult<String> {
26 | Ok(janitor::debdiff::markdownify_debdiff(debdiff))
27 | }
28 |
29 | #[pyfunction]
30 | fn htmlize_debdiff(debdiff: &str) -> PyResult<String> {
31 | Ok(janitor::debdiff::htmlize_debdiff(debdiff))
32 | }
33 |
34 | create_exception!(
35 | janitor.debian.debdiff,
36 | DebdiffError,
37 | pyo3::exceptions::PyException
38 | );
39 |
40 | #[pyfunction]
41 | fn run_debdiff<'a>(
42 | py: Python<'a>,
43 | old_binaries: Vec<String>,
44 | new_binaries: Vec<String>,
45 | ) -> PyResult<Bound<'a, PyAny>> {
46 | pyo3_async_runtimes::tokio::future_into_py(py, async move {
47 | let r = janitor::debdiff::run_debdiff(
48 | old_binaries.iter().map(|x| x.as_str()).collect::<Vec<_>>(),
49 | new_binaries.iter().map(|x| x.as_str()).collect::<Vec<_>>(),
50 | )
51 | .await
52 | .map_err(|e| DebdiffError::new_err((e.to_string(),)))?;
53 |
54 | Ok(Python::with_gil(|py| {
55 | PyBytes::new_bound(py, &r).to_object(py)
56 | }))
57 | })
58 | }
59 |
60 | pub(crate) fn init_module(py: Python, m: &Bound<PyModule>) -> PyResult<()> {
61 | m.add_function(wrap_pyfunction!(debdiff_is_empty, m)?)?;
62 | m.add_function(wrap_pyfunction!(filter_boring, m)?)?;
63 | m.add_function(wrap_pyfunction!(section_is_wdiff, m)?)?;
64 | m.add_function(wrap_pyfunction!(markdownify_debdiff, m)?)?;
65 | m.add_function(wrap_pyfunction!(htmlize_debdiff, m)?)?;
66 | m.add_function(wrap_pyfunction!(run_debdiff, m)?)?;
67 | m.add("DebdiffError", py.get_type_bound::())?;
68 | Ok(())
69 | }
70 |
--------------------------------------------------------------------------------
/common-py/src/io.rs:
--------------------------------------------------------------------------------
1 | use pyo3::exceptions::PyRuntimeError;
2 | use pyo3::prelude::*;
3 | use pyo3::types::PyBytes;
4 | use std::io::Read;
5 |
6 | #[pyclass]
7 | pub(crate) struct Readable(Box<dyn Read + Send + Sync>);
8 |
9 | impl Readable {
10 | pub fn new(read: Box<dyn Read + Send + Sync>) -> Self {
11 | Self(read)
12 | }
13 | }
14 |
15 | #[pymethods]
16 | impl Readable {
17 | #[pyo3(signature = (size=None))]
18 | fn read(&mut self, py: Python, size: Option) -> PyResult {
19 | let mut buf = vec![0; size.unwrap_or(4096)];
20 | let n = self
21 | .0
22 | .read(&mut buf)
23 | .map_err(|e| PyRuntimeError::new_err(e))?;
24 | buf.truncate(n);
25 | Ok(PyBytes::new_bound(py, &buf).into())
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/common-py/src/lib.rs:
--------------------------------------------------------------------------------
1 | // Necessary since create_exception!() uses cfg!(feature = "gil-refs"),
2 | // but we don't have that feature.
3 | #![allow(unexpected_cfgs)]
4 | use pyo3::exceptions::PyValueError;
5 | use pyo3::prelude::*;
6 |
7 | mod artifacts;
8 | mod config;
9 | mod debdiff;
10 | mod io;
11 | mod logs;
12 | mod vcs;
13 |
14 | #[pyfunction]
15 | fn get_branch_vcs_type(branch: PyObject) -> PyResult<String> {
16 | let branch = breezyshim::branch::GenericBranch::new(branch);
17 | janitor::vcs::get_branch_vcs_type(&branch)
18 | .map_err(|e| PyValueError::new_err((format!("{}", e),)))
19 | .map(|vcs| vcs.to_string())
20 | }
21 |
22 | #[pyfunction]
23 | fn is_authenticated_url(url: &str) -> PyResult<bool> {
24 | Ok(janitor::vcs::is_authenticated_url(
25 | &url::Url::parse(url)
26 | .map_err(|e| PyValueError::new_err((format!("Invalid URL: {}", e),)))?,
27 | ))
28 | }
29 |
30 | #[pyfunction]
31 | fn is_alioth_url(url: &str) -> PyResult<bool> {
32 | Ok(janitor::vcs::is_alioth_url(&url::Url::parse(url).map_err(
33 | |e| PyValueError::new_err((format!("Invalid URL: {}", e),)),
34 | )?))
35 | }
36 |
37 | #[pymodule]
38 | pub fn _common(py: Python, m: &Bound<PyModule>) -> PyResult<()> {
39 | pyo3_log::init();
40 | m.add_function(wrap_pyfunction!(is_authenticated_url, m)?)?;
41 | m.add_function(wrap_pyfunction!(is_alioth_url, m)?)?;
42 | m.add_function(wrap_pyfunction!(get_branch_vcs_type, m)?)?;
43 |
44 | let artifactsm = pyo3::types::PyModule::new_bound(py, "artifacts")?;
45 | crate::artifacts::init(py, &artifactsm)?;
46 | m.add_submodule(&artifactsm)?;
47 |
48 | let vcsm = pyo3::types::PyModule::new_bound(py, "vcs")?;
49 | crate::vcs::init(py, &vcsm)?;
50 | m.add_submodule(&vcsm)?;
51 |
52 | let logsm = pyo3::types::PyModule::new_bound(py, "logs")?;
53 | crate::logs::init(py, &logsm)?;
54 | m.add_submodule(&logsm)?;
55 |
56 | let configm = pyo3::types::PyModule::new_bound(py, "config")?;
57 | crate::config::init(py, &configm)?;
58 | m.add_submodule(&configm)?;
59 |
60 | let debdiff = PyModule::new_bound(py, "debdiff")?;
61 | crate::debdiff::init_module(py, &debdiff)?;
62 | m.add_submodule(&debdiff)?;
63 |
64 | Ok(())
65 | }
66 |
--------------------------------------------------------------------------------
/devnotes/adding-a-new-campaign.rst:
--------------------------------------------------------------------------------
1 | Adding a new campaign
2 | =====================
3 |
4 | Create a new codemod script
5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
6 |
7 | At the core of every campaign is a script that can make changes
8 | to a version controlled branch.
9 |
10 | This script will be executed in a version controlled checkout of
11 | a source codebase, and can make changes to the codebase as it sees fit.
12 | See `this blog post `_ for more
13 | information about creating codemod scripts.
14 |
15 | You can test the script independently by running silver-platter, e.g.
16 |
17 | ``./debian-svp apply --command=myscript --dry-run --diff`` (from a checkout)
18 | or
19 |
20 | ``./debian-svp run --command=myscript --dry-run --diff package-name``
21 |
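A minimal sketch of such a codemod script, as an illustration only (the file it
edits and the use of stdout for the change description are assumptions, not
something this repository prescribes)::

    #!/usr/bin/python3
    # Runs inside a VCS checkout of the codebase; any edits it makes become
    # the proposed change. Here: strip trailing whitespace from debian/control.
    from pathlib import Path

    path = Path("debian/control")
    text = path.read_text()
    fixed = "\n".join(line.rstrip() for line in text.splitlines()) + "\n"
    if fixed != text:
        path.write_text(fixed)
        # Assumption: a short description of the change goes to stdout.
        print("Strip trailing whitespace from debian/control.")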
22 | Add configuration for the campaign
23 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24 |
25 | In janitor.conf, add a section for the campaign. E.g.::
26 |
27 | campaign {
28 | name: "some-name"
29 | branch_name: "some-name"
30 | debian_build {
31 | build_suffix: "suf"
32 | }
33 | }
34 |
35 | Add script for finding candidates
36 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
37 |
38 | Add a script that can gather candidates for the new campaign. This script should
39 | be run regularly to find new candidates to schedule, with its JSON output
40 | uploaded to $RUNNER_URL/candidates.
41 |
--------------------------------------------------------------------------------
/devnotes/branch-names.rst:
--------------------------------------------------------------------------------
1 | Goal
2 | ====
3 |
4 | For runs:
5 | * runs/$uuid/$function tags
6 | * runs/$uuid/tags/$tag tags for new/updated upstream tags
7 |
8 | There is a symref at refs/$suite/$function that points at said tag and is kept updated.
9 |
10 | Also, track this information in the database.
11 |
12 | For related repositories, create remotes:
13 |
14 | * remotes/origin/ for Debian packaging
15 | * remotes/upstream/ for Upstream
16 |
17 | Names for functions:
18 |
19 | * "main" for the main branch (packaging or otherwise)
20 | * "upstream" for the upstream import branch for Debian packages
21 | * "pristine-tar" for the pristine-tar branch
22 |
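Putting this together for a single run in the lintian-fixes suite, the refs
would look roughly like this (UUID shortened; illustrative only)::

    refs/tags/runs/2c9e.../main            <- result branch for this run
    refs/tags/runs/2c9e.../pristine-tar
    refs/tags/runs/2c9e.../tags/v1.2.3     <- new/updated upstream tag
    refs/lintian-fixes/main                <- symref to refs/tags/runs/2c9e.../main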
23 | Roadmap
24 | =======
25 |
26 | * Update publisher to use new tag names
27 |
28 | * Send requests to publisher to mirror origin/upstream repositories
29 | To start off with just:
30 | * name of remote ("origin", "upstream")
31 | * URL of remote
32 |
33 | * Push symrefs (refs/$suite/$function => refs/tags/$uuid/$function)
34 | + needs to be done by publisher
35 |
--------------------------------------------------------------------------------
/devnotes/glossary.rst:
--------------------------------------------------------------------------------
1 | Campaign
2 | ########
3 |
4 | An effort to fix a particular thing in the set of packages the janitor instance
5 | is responsible for.
6 |
7 | E.g. 'lintian-fixes', 'fresh-upstreams', 'fresh-releases'.
8 |
9 | Codebase
10 | ########
11 |
12 | Typically, a VCS tree of some sort. Usually identified by a branch in a
13 | particular repository.
14 |
15 | Candidate
16 | #########
17 |
18 | A suite + codebase that has been identified as potentially being improvable.
19 |
20 | Target
21 | ######
22 |
23 | Either "debian" or "upstream". The kind of environment to target.
24 | This determines how the resulting codebase will be built.
25 |
--------------------------------------------------------------------------------
/differ-py/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "differ-py"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | edition.workspace = true
6 | description = "Differ for the janitor - python bindings"
7 | publish = false
8 | license = "GPL-3.0+"
9 | repository = "https://github.com/jelmer/janitor.git"
10 | homepage = "https://github.com/jelmer/janitor"
11 |
12 | [lib]
13 | crate-type = ["cdylib"]
14 |
15 | [dependencies]
16 | pyo3 = {workspace = true, features=["serde", "chrono"]}
17 | janitor-publish = { path = "../publish" }
18 | pyo3-log = { workspace = true }
19 | log = "0.4"
20 | chrono = { workspace = true, features = ["serde"] }
21 | breezyshim.workspace = true
22 | silver-platter = { workspace = true, features = ["debian"] }
23 | janitor-differ = { path = "../differ" }
24 | pyo3-async-runtimes = { workspace = true, features = ["tokio-runtime"] }
25 |
26 | [features]
27 | extension-module = ["pyo3/extension-module"]
28 |
--------------------------------------------------------------------------------
/differ-py/src/lib.rs:
--------------------------------------------------------------------------------
1 | use pyo3::exceptions::{PyRuntimeError, PyTimeoutError, PyValueError};
2 | use pyo3::prelude::*;
3 |
4 | #[pyfunction]
5 | #[pyo3(signature = (old_binaries, new_binaries, timeout = None, memory_limit = None, diffoscope_command = None))]
6 | fn run_diffoscope<'a>(
7 | py: Python<'a>,
8 | old_binaries: Vec<(String, String)>,
9 | new_binaries: Vec<(String, String)>,
10 | timeout: Option<f64>,
11 | memory_limit: Option<u64>,
12 | diffoscope_command: Option<String>,
13 | ) -> PyResult<Bound<'a, PyAny>> {
14 | pyo3_async_runtimes::tokio::future_into_py(py, async move {
15 | let old_binaries = old_binaries
16 | .iter()
17 | .map(|(path, hash)| (path.as_str(), hash.as_str()))
18 | .collect::<Vec<_>>();
19 | let new_binaries = new_binaries
20 | .iter()
21 | .map(|(path, hash)| (path.as_str(), hash.as_str()))
22 | .collect::<Vec<_>>();
23 |
24 | let o = janitor_differ::diffoscope::run_diffoscope(
25 | old_binaries.as_slice(),
26 | new_binaries.as_slice(),
27 | timeout,
28 | memory_limit,
29 | diffoscope_command.as_deref(),
30 | )
31 | .await
32 | .map_err(|e| match e {
33 | janitor_differ::diffoscope::DiffoscopeError::Timeout => {
34 | PyTimeoutError::new_err("Diffoscope timed out")
35 | }
36 | janitor_differ::diffoscope::DiffoscopeError::Io(e) => e.into(),
37 | janitor_differ::diffoscope::DiffoscopeError::Other(e) => PyRuntimeError::new_err(e),
38 | janitor_differ::diffoscope::DiffoscopeError::Serde(e) => {
39 | PyValueError::new_err(e.to_string())
40 | }
41 | })?;
42 | Ok(Python::with_gil(|py| o.to_object(py)))
43 | })
44 | }
45 |
46 | #[pyfunction]
47 | fn filter_boring_udiff(
48 | udiff: &str,
49 | old_version: &str,
50 | new_version: &str,
51 | display_version: &str,
52 | ) -> PyResult<String> {
53 | let o = janitor_differ::diffoscope::filter_boring_udiff(
54 | udiff,
55 | old_version,
56 | new_version,
57 | display_version,
58 | )
59 | .map_err(|e| PyValueError::new_err(e.to_string()))?;
60 | Ok(o)
61 | }
62 |
63 | #[pymodule]
64 | pub fn _differ(m: &Bound<PyModule>) -> PyResult<()> {
65 | pyo3_log::init();
66 |
67 | m.add_function(wrap_pyfunction!(run_diffoscope, m)?)?;
68 | m.add_function(wrap_pyfunction!(filter_boring_udiff, m)?)?;
69 |
70 | Ok(())
71 | }
72 |
--------------------------------------------------------------------------------
/differ/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-differ"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | edition.workspace = true
6 | description = "Differ for the janitor"
7 | license = "GPL-3.0+"
8 | repository = "https://github.com/jelmer/janitor.git"
9 | homepage = "https://github.com/jelmer/janitor"
10 |
11 | [dependencies]
12 | janitor = { path = ".." }
13 | clap = { optional = true, workspace = true }
14 | env_logger = { optional = true, workspace = true }
15 | serde_json.workspace = true
16 | tokio = { workspace = true, features = ["full"] }
17 | tracing = "0.1.41"
18 | serde.workspace = true
19 | shlex.workspace = true
20 | patchkit = "0.2.1"
21 | axum.workspace = true
22 | sqlx.workspace = true
23 | redis = { workspace = true, features = ["aio", "connection-manager", "tokio", "tokio-comp", "json"] }
24 | tempfile.workspace = true
25 | breezyshim = { workspace = true, features = ["sqlx"] }
26 | nix = { version = "0.29.0", features = ["resource"] }
27 | axum-extra = { version = "0.10.1", features = ["typed-header"] }
28 | mime = "0.3.17"
29 | accept-header = "0.2.3"
30 | pyo3.workspace = true
31 |
32 | [dev-dependencies]
33 | maplit = { workspace = true }
34 | static_assertions = { workspace = true }
35 |
36 | [features]
37 | cli = ["dep:clap", "dep:env_logger"]
38 | default = ["cli"]
39 |
40 | [[bin]]
41 | name = "janitor-differ"
42 | path = "src/main.rs"
43 | required-features = ["cli"]
44 |
--------------------------------------------------------------------------------
/differ/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Differ crate for the Janitor project.
2 | //!
3 | //! This crate provides functionality for finding and comparing binary files.
4 |
5 | #![deny(missing_docs)]
6 |
7 | /// Module for interacting with diffoscope
8 | pub mod diffoscope;
9 |
10 | use std::ffi::{OsStr, OsString};
11 | use std::path::{Path, PathBuf};
12 |
13 | /// Find binary files in a directory.
14 | ///
15 | /// # Arguments
16 | /// * `path` - The directory to search
17 | ///
18 | /// # Returns
19 | /// An iterator of (filename, path) pairs
20 | pub fn find_binaries(path: &Path) -> impl Iterator<Item = (OsString, PathBuf)> {
21 | std::fs::read_dir(path).unwrap().filter_map(|entry| {
22 | let entry = entry.ok()?;
23 | let path = entry.path();
24 | Some((entry.file_name(), path))
25 | })
26 | }
27 |
28 | /// Check if a filename is a binary package.
29 | ///
30 | /// # Arguments
31 | /// * `name` - The filename to check
32 | ///
33 | /// # Returns
34 | /// `true` if the file is a binary package, `false` otherwise
35 | pub fn is_binary(name: &str) -> bool {
36 | name.ends_with(".deb") || name.ends_with(".udeb")
37 | }
38 |
--------------------------------------------------------------------------------
/docs/flow.md:
--------------------------------------------------------------------------------
1 | Package metadata
2 | ================
3 |
4 | Package metadata contains mostly static information about a package, imported
5 | straight from the archive. This includes the package name, maintainer email,
6 | uploader emails (if any) as well as the version control information
7 | (vcs type, URL, subpath) and optionally popularity.
8 |
9 | The "schedule" job regularly imports package metadata. On Debian, this information
10 | comes from UDD. On other Debian-like distributions, it's imported from the
11 | apt sources file.
12 |
13 | The importing has two components:
14 |
15 | * A script that can output Package() protobufs (see janitor/package_metadata.proto) to standard output (a minimal sketch follows the list)
16 | * An importer that reads these protobufs from standard input and updates the database (janitor.package_metadata)
17 |
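As a purely illustrative sketch, such an export script might look like the following. The generated module name (janitor.package_metadata_pb2), the Package field names, and the use of protobuf text format are assumptions for illustration only; the actual fields live in janitor/package_metadata.proto and the importer may expect a different encoding.

```python
#!/usr/bin/python3
# Hypothetical metadata exporter: emits Package protobufs on standard output.
# Module and field names are assumptions; see janitor/package_metadata.proto.
import sys

from google.protobuf import text_format

from janitor.package_metadata_pb2 import Package


def emit(packages):
    for info in packages:
        pkg = Package()
        pkg.name = info["name"]
        pkg.vcs_type = info["vcs_type"]
        pkg.vcs_url = info["vcs_url"]
        pkg.maintainer_email = info["maintainer_email"]
        sys.stdout.write(text_format.MessageToString(pkg) + "\n")


if __name__ == "__main__":
    emit([{
        "name": "hello",
        "vcs_type": "git",
        "vcs_url": "https://salsa.debian.org/debian/hello.git",
        "maintainer_email": "maintainer@example.com",
    }])
```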
18 | Candidates
19 | ==========
20 |
21 | Once the janitor knows about a package, candidates can be created. A candidate
22 | is a piece of data indicating that a particular suite (TODO: better name) (e.g. lintian-fixes)
23 | can be run on a particular package and that there is some chance it will yield
24 | changes.
25 |
26 | Candidates include information like:
27 |
28 | * value: a relative number that explains how useful this change would be
29 | * success_chance: an estimate of how likely this change is to succeed and result in a build
30 | * context: some indicator of the current state of the world. Used to avoid retrying
31 | builds if nothing has really changed. e.g. for new upstream releases, this
32 | is the upstream version number of the latest release
33 |
34 | Like package metadata, candidates are generated by a script that writes
35 | YAML to standard output. Candidate generation scripts
36 | can be really complicated - allowing for more optimal scheduling - or really
37 | simple, in which case they just output a candidate for each package in a suite
38 | with fixed settings for value and success_chance.
39 |
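For example, a deliberately simple candidate generator could be the sketch below. The exact YAML keys accepted by the importer are an assumption here; the field names just mirror the description above.

```python
#!/usr/bin/python3
# Hypothetical candidate generator: one candidate per codebase with fixed
# value and success_chance. The YAML key names are assumptions.
import sys

import yaml

CODEBASES = ["hello", "dulwich", "offlineimap"]

candidates = [
    {
        "codebase": name,
        "campaign": "lintian-fixes",
        "value": 50,
        "success_chance": 0.9,
    }
    for name in CODEBASES
]

yaml.safe_dump(candidates, sys.stdout)
```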
40 | Scheduling
41 | ==========
42 |
43 | Once candidates have been created, the schedule job (``janitor.schedule``)
44 | inserts new entries into the queue, taking into account a variety of factors:
45 |
46 | * success chance
47 | * value
48 | * popularity of the package if known (from popcon)
49 | * previous success rate (for the suite/package combination and the package itself)
50 | * previous run duration
51 | * whether the context has changed since the last run
52 |
53 | The queue consists of prioritized buckets. Manually requested runs, runs triggered
54 | by the publisher (e.g. to resolve merge conflicts) and retried runs are always
55 | executed before runs that were scheduled by the scheduler.
56 |
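As a toy illustration only (the real scheduler's formula and weights are not reproduced here), the factors above could be folded into a single offset, with lower offsets sorting earlier within a bucket:

```python
# Toy illustration of turning the scheduling factors into one sort key; this
# is not the formula used by janitor.schedule.
def queue_offset(value, success_chance, popularity=1.0, past_success_rate=0.5,
                 previous_duration=600.0, context_changed=True):
    if not context_changed:
        # Nothing changed since the last run, so there is no point retrying soon.
        return float("inf")
    expected_value = value * success_chance * past_success_rate * popularity
    # Cheap runs with a high expected value get the smallest offsets.
    return previous_duration / max(expected_value, 1e-6)
```

Manually requested and publisher-triggered entries then go into higher-priority buckets, so they are picked up before scheduler-filled entries regardless of offset.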
--------------------------------------------------------------------------------
/docs/glossary.md:
--------------------------------------------------------------------------------
1 | * *codebase*: A collection of source code files that are managed together in a
2 | version control system. Usually this will be the root of a specific branch in a
3 | VCS repository; sometimes it will be a subdirectory within one. It can also be
4 | e.g. a tarball published somewhere.
5 |
6 | * *cotenants*: Other codebases that share the same branch as the current codebase.
7 |
--------------------------------------------------------------------------------
/docs/production.md:
--------------------------------------------------------------------------------
1 | # Running Janitor in production
2 |
3 | There are [containers](Dockerfiles_.md) available for each of the Janitor services.
4 |
5 | [Pre-built containers](https://github.com/jelmer?tab=packages&repo_name=janitor) are
6 | available, but you can also build them yourself:
7 |
8 | ```console
9 | $ sudo apt install \
10 | buildah \
11 | make
12 | $ make build-all
13 | ```
14 |
15 | For a Janitor instance, you probably want a custom website in combination with
16 | the Janitor API. See the existing instances for inspiration.
17 |
--------------------------------------------------------------------------------
/docs/structure.md:
--------------------------------------------------------------------------------
1 | ## Structure
2 |
3 | - `./reschedule.py` - a tool for users of the janitor; it can be run by anybody locally
4 | - `./helpers/*` - these all need to be run by an admin inside a janitor deployment (with access to the database, etc.)
5 |
--------------------------------------------------------------------------------
/git-store/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-git-store"
3 | version = "0.1.0"
4 | edition.workspace = true
5 |
6 | [lib]
7 |
8 | [dependencies]
9 |
--------------------------------------------------------------------------------
/git-store/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Git Store crate for the Janitor project.
2 | //!
3 | //! This crate provides functionality for storing and managing Git repositories.
4 |
5 | #![deny(missing_docs)]
6 |
--------------------------------------------------------------------------------
/git-store/src/main.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 | println!("Hello, world!");
3 | }
4 |
--------------------------------------------------------------------------------
/helpers/cleanup-repositories.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | """Cleanup owned repositories that are no longer needed for merge proposals.
3 |
4 | This is necessary in particular because some hosting sites
5 | (e.g. default GitLab) have restrictions on the number of repositories
6 | that a single user can own (in the case of GitLab, 1000).
7 | """
8 |
9 | import logging
10 | import sys
11 |
12 | import breezy
13 | import breezy.bzr
14 | import breezy.git # noqa: F401
15 | import breezy.plugins
16 | import breezy.plugins.github # noqa: F401
17 | import breezy.plugins.gitlab # noqa: F401
18 | import breezy.plugins.launchpad # noqa: F401
19 | from breezy.forge import UnsupportedForge, iter_forge_instances
20 |
21 |
22 | def projects_to_remove(instance):
23 | in_use = set()
24 | for mp in instance.iter_my_proposals():
25 | if not mp.is_closed() and not mp.is_merged():
26 | in_use.add(mp.get_source_project())
27 | for project in instance.iter_my_forks():
28 | if project in in_use:
29 | continue
30 | yield project
31 |
32 |
33 | def main(argv=None):
34 | import argparse
35 |
36 | parser = argparse.ArgumentParser()
37 | parser.add_argument("--dry-run", action="store_true", help="Dry run.")
38 | args = parser.parse_args()
39 |
40 | logging.basicConfig(level=logging.INFO, format="%(message)s")
41 |
42 | for instance in iter_forge_instances():
43 | try:
44 | for project in projects_to_remove(instance):
45 | logging.info(f"Deleting {project} from {instance!r}")
46 | if not args.dry_run:
47 | instance.delete_project(project)
48 | except UnsupportedForge as e:
49 | logging.warning("Ignoring unsupported instance %s: %s", instance, e)
50 |
51 |
52 | if __name__ == "__main__":
53 | sys.exit(main())
54 |
--------------------------------------------------------------------------------
/helpers/render-publish-template.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | import argparse
4 | import asyncio
5 | import logging
6 | import os
7 | import sys
8 |
9 | from janitor.config import read_config
10 | from janitor.publish_one import load_template_env
11 |
12 | sys.path.insert(0, os.path.dirname(__file__))
13 |
14 | from janitor import state # noqa: E402
15 | from janitor.debian.debdiff import debdiff_is_empty, markdownify_debdiff # noqa: E402
16 |
17 | loop = asyncio.get_event_loop()
18 |
19 | parser = argparse.ArgumentParser()
20 | parser.add_argument(
21 | "--config", type=str, default="janitor.conf", help="Path to configuration."
22 | )
23 | parser.add_argument("-r", "--run-id", type=str, help="Run id to process")
24 | parser.add_argument("--role", type=str, help="Role", default="main")
25 | parser.add_argument("--format", type=str, choices=["md", "txt"], default="md")
26 |
27 | parser.add_argument("--template-env-path", type=str, help="Path to template env")
28 |
29 | args = parser.parse_args()
30 |
31 | logging.basicConfig(level=logging.INFO, format="%(message)s")
32 |
33 | try:
34 | with open(args.config) as f:
35 | config = read_config(f)
36 | except FileNotFoundError:
37 | parser.error(f"config path {args.config} does not exist")
38 |
39 | template_env = load_template_env(args.template_env_path)
40 |
41 |
42 | async def process_build(db_location, run_id, role, format):
43 | async with state.create_pool(db_location) as conn:
44 | query = """
45 | SELECT
46 | package.name AS package,
47 | suite AS campaign,
48 | id AS log_id,
49 | result AS _result
50 | FROM run
51 | LEFT JOIN package ON run.codebase = package.codebase
52 | WHERE
53 | id = $1
54 | """
55 | row = await conn.fetchrow(query, run_id)
56 | vs = {}
57 | vs.update(row)
58 | if row["_result"]:
59 | vs.update(row["_result"])
60 | vs["external_url"] = "https://janitor.debian.net/"
61 | vs["markdownify_debdiff"] = markdownify_debdiff
62 | vs["debdiff_is_empty"] = debdiff_is_empty
63 | print(template_env.get_template(vs["campaign"] + "." + format).render(vs))
64 |
65 |
66 | loop.run_until_complete(
67 | process_build(config.database_location, args.run_id, args.role, args.format)
68 | )
69 |
--------------------------------------------------------------------------------
/mail-filter/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-mail-filter"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | edition.workspace = true
6 | description = "Mail filter for the janitor"
7 | license = "Apache-2.0"
8 | repository = "https://github.com/jelmer/janitor.git"
9 | homepage = "https://github.com/jelmer/janitor"
10 |
11 | [dependencies]
12 | isahc = "1"
13 | serde_json = "1"
14 | select = "0.6"
15 | mailparse = "0.16"
16 | async-std = "1"
17 | log = "0.4"
18 | clap = { workspace = true, optional = true, features = ["derive"] }
19 | reqwest = { version = "0.12", features = ["blocking", "json"], optional = true }
20 |
21 | [[bin]]
22 | name="janitor-mail-filter"
23 | required-features=["cmdline"]
24 |
25 | [features]
26 | default = ["cmdline"]
27 | cmdline = ["dep:clap", "dep:reqwest"]
28 |
--------------------------------------------------------------------------------
/mail-filter/src/bin/janitor-mail-filter.rs:
--------------------------------------------------------------------------------
1 | use std::fs::File;
2 |
3 | use log::debug;
4 | use std::process;
5 |
6 | use clap::Parser;
7 | use log::{error, info};
8 | use reqwest::blocking::Client;
9 |
10 | #[derive(Parser)]
11 | struct Args {
12 | #[clap(
13 | short,
14 | long,
15 | default_value = "https://janitor.debian.net/api/refresh-proposal-status"
16 | )]
17 | refresh_url: String,
18 | #[clap(short, long, default_value = "/dev/stdin")]
19 | input: String,
20 | }
21 |
22 | fn refresh_merge_proposal(api_url: &str, merge_proposal_url: &str) -> Result<(), String> {
23 | let client = Client::new();
24 | let res = client
25 | .post(api_url)
26 | .json(&serde_json::json!({"url": merge_proposal_url}))
27 | .send()
28 | .map_err(|e| e.to_string())?;
29 |
30 | match res.status().as_u16() {
31 | 200 | 202 => Ok(()),
32 | status => Err(format!(
33 | "error {} triggering refresh for {}",
34 | status, api_url
35 | )),
36 | }
37 | }
38 |
39 | fn main() {
40 | let args = Args::parse();
41 |
42 | let f = File::open(args.input).unwrap();
43 |
44 | match janitor_mail_filter::parse_email(f) {
45 | Some(merge_proposal_url) => {
46 | info!("Found merge proposal URL: {}", merge_proposal_url);
47 | match refresh_merge_proposal(&args.refresh_url, &merge_proposal_url) {
48 | Ok(()) => process::exit(0),
49 | Err(e) => {
50 | error!("Error: {}", e);
51 | process::exit(1);
52 | }
53 | }
54 | }
55 | None => {
56 | debug!("No merge proposal URL found.");
57 | process::exit(0);
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/mail-filter/src/tests.rs:
--------------------------------------------------------------------------------
1 | use super::parse_email;
2 |
3 | #[test]
4 | fn test_parse_github_merged_email() {
5 | let email = include_bytes!("../tests/data/github-merged-email.txt");
6 |
7 | assert_eq!(
8 | Some("https://github.com/UbuntuBudgie/budgie-desktop/pull/78"),
9 | parse_email(std::io::Cursor::new(email)).as_deref()
10 | );
11 | }
12 |
13 | #[test]
14 | fn test_parse_gitlab_merged_email() {
15 | let email = include_bytes!("../tests/data/gitlab-merged-email.txt");
16 |
17 | assert_eq!(
18 | Some("https://salsa.debian.org/debian/pkg-lojban-common/-/merge_requests/2"),
19 | parse_email(std::io::Cursor::new(email)).as_deref()
20 | );
21 | }
22 |
--------------------------------------------------------------------------------
/publish-py/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "publish-py"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | edition.workspace = true
6 | description = "Publisher for the janitor - python bindings"
7 | publish = false
8 | license = "GPL-3.0+"
9 | repository = "https://github.com/jelmer/janitor.git"
10 | homepage = "https://github.com/jelmer/janitor"
11 |
12 | [lib]
13 | crate-type = ["cdylib"]
14 |
15 | [dependencies]
16 | pyo3 = {workspace = true, features=["serde", "chrono"]}
17 | janitor-publish = { path = "../publish" }
18 | pyo3-log = { workspace = true }
19 | log = "0.4"
20 | chrono = { workspace = true, features = ["serde"] }
21 | breezyshim.workspace = true
22 | silver-platter = { workspace = true, features = ["debian"] }
23 | url.workspace = true
24 |
25 | [features]
26 | extension-module = ["pyo3/extension-module"]
27 |
--------------------------------------------------------------------------------
/publish.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | PYTHONPATH="$PYTHONPATH:$(pwd)/lintian-brush:$(pwd)/silver-platter:$(pwd)/breezy" python3 -m janitor.publish "$@"
3 |
--------------------------------------------------------------------------------
/publish/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "janitor-publish"
3 | version = "0.0.0"
4 | authors = ["Jelmer Vernooij "]
5 | edition.workspace = true
6 | description = "Publisher for the janitor"
7 | license = "GPL-3.0+"
8 | repository = "https://github.com/jelmer/janitor.git"
9 | homepage = "https://github.com/jelmer/janitor"
10 |
11 | [dependencies]
12 | axum = { workspace = true }
13 | breezyshim = { workspace = true, features = ["sqlx"] }
14 | chrono.workspace = true
15 | clap = { workspace = true, features = ["derive"] }
16 | debian-changelog = "0.2.0"
17 | janitor = { path = ".." }
18 | log.workspace = true
19 | minijinja = { version = "2", features = ["loader"] }
20 | pyo3.workspace = true
21 | redis = { workspace = true, features = ["tokio-comp", "json", "connection-manager"] }
22 | rslock = { workspace = true, default-features = false, features = ["tokio-comp"] }
23 | reqwest.workspace = true
24 | serde.workspace = true
25 | serde_json.workspace = true
26 | shlex.workspace = true
27 | silver-platter.workspace = true
28 | tokio = { workspace = true, features = ["full"] }
29 | url = { workspace = true, features = ["serde"] }
30 | sqlx = { workspace = true, features = ["chrono"] }
31 | maplit.workspace = true
32 | prometheus = "0.14.0"
33 |
34 | [dev-dependencies]
35 | maplit = { workspace = true }
36 |
--------------------------------------------------------------------------------
/pull_worker.sh:
--------------------------------------------------------------------------------
1 | run_worker.sh
--------------------------------------------------------------------------------
/py/janitor/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # Copyright (C) 2018 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 |
19 | import shlex
20 | from urllib.request import URLopener, build_opener, install_opener
21 |
22 | from breezy.transport import http as _mod_http
23 | from breezy.transport.http import urllib as _mod_urllib
24 |
25 | __version__ = (0, 1, 0)
26 | version_string = ".".join(map(str, __version__))
27 |
28 |
29 | def set_user_agent(user_agent):
30 | _mod_http.default_user_agent = lambda: user_agent
31 | _mod_urllib.AbstractHTTPHandler._default_headers["User-agent"] = user_agent
32 | URLopener.version = user_agent
33 | opener = build_opener()
34 | opener.addheaders = [("User-agent", user_agent)]
35 | install_opener(opener)
36 |
37 |
38 | CAMPAIGN_REGEX = "[a-z0-9-]+"
39 |
40 |
41 | def splitout_env(command):
42 | args = shlex.split(command)
43 | env = {}
44 | while len(args) > 0 and "=" in args[0]:
45 | (key, value) = args.pop(0).split("=", 1)
46 | env[key] = value
47 | return env, shlex.join(args)
48 |
--------------------------------------------------------------------------------
/py/janitor/_common.pyi:
--------------------------------------------------------------------------------
1 | from breezy.branch import Branch
2 |
3 | def is_alioth_url(url: str) -> bool: ...
4 | def is_authenticated_url(url: str) -> bool: ...
5 | def get_branch_vcs_type(branch: Branch) -> str: ...
6 |
--------------------------------------------------------------------------------
/py/janitor/_launchpad.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2019 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 |
19 | def override_launchpad_consumer_name():
20 | from breezy.forge import ForgeLoginRequired
21 | from breezy.plugins.launchpad import lp_api
22 | from launchpadlib.credentials import RequestTokenAuthorizationEngine
23 | from launchpadlib.launchpad import Launchpad
24 |
25 | class LoginRequiredAuthorizationEngine(RequestTokenAuthorizationEngine):
26 | def make_end_user_authorize_token(self, credentials, request_token):
27 | raise ForgeLoginRequired(self.web_root)
28 |
29 | def connect_launchpad(
30 | base_url, timeout=None, proxy_info=None, version=Launchpad.DEFAULT_VERSION
31 | ):
32 | cache_directory = lp_api.get_cache_directory()
33 | credential_store = lp_api.BreezyCredentialStore()
34 | authorization_engine = LoginRequiredAuthorizationEngine(
35 | base_url, consumer_name="Janitor"
36 | )
37 | return Launchpad.login_with(
38 | "Janitor",
39 | base_url,
40 | cache_directory,
41 | timeout=timeout,
42 | credential_store=credential_store,
43 | authorization_engine=authorization_engine,
44 | version=version,
45 | )
46 |
47 | lp_api.connect_launchpad = connect_launchpad
48 |
--------------------------------------------------------------------------------
/py/janitor/_publish.pyi:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | def calculate_next_try_time(finish_time: datetime, attempt_count: int) -> datetime: ...
4 | def get_merged_by_user_url(url: str, user: str) -> str | None: ...
5 | def branches_match(url_a: str | None, url_b: str | None) -> bool: ...
6 | def role_branch_url(url: str, remote_branch_name: str | None) -> str: ...
7 |
8 | class RateLimiter:
9 | def set_mps_per_bucket(self, mps_per_bucket: dict[str, dict[str, int]]) -> None: ...
10 | def check_allowed(self, bucket: str) -> None: ...
11 | def inc(self, bucket: str) -> None: ...
12 | def get_stats(self) -> dict[str, tuple[int, int | None]]: ...
13 |
14 | class SlowStartRateLimiter(RateLimiter):
15 | def __init__(self, mps_per_bucket: int | None) -> None: ...
16 |
17 | class NonRateLimiter(RateLimiter):
18 | def __init__(self) -> None: ...
19 |
20 | class FixedRateLimiter(RateLimiter):
21 | def __init__(self, mps_per_bucket: int) -> None: ...
22 |
23 | class RateLimited(Exception):
24 | def __init__(self, message: str) -> None: ...
25 |
26 | class BucketRateLimited(RateLimited):
27 | def __init__(self, bucket: str, open_mps: int, max_open_mps: int) -> None: ...
28 |
29 | bucket: str
30 | open_mps: int
31 | max_open_mps: int
32 |
--------------------------------------------------------------------------------
/py/janitor/_runner.pyi:
--------------------------------------------------------------------------------
1 | def committer_env(committer: str | None) -> dict[str, str]: ...
2 | def is_log_filename(filename: str) -> bool: ...
3 |
--------------------------------------------------------------------------------
/py/janitor/_site.pyi:
--------------------------------------------------------------------------------
1 | def find_dist_log_failure(
2 | logf: str, length: int
3 | ) -> tuple[int, tuple[int, int], list[int] | None]: ...
4 | def find_build_log_failure(
5 | logf: str, length: int
6 | ) -> tuple[int, tuple[int, int], list[int] | None]: ...
7 |
--------------------------------------------------------------------------------
/py/janitor/artifacts.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2020 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | """Artifacts."""
19 |
20 | import asyncio
21 |
22 | from ._common import artifacts # type: ignore
23 |
24 | ArtifactManager = artifacts.ArtifactManager
25 | GCSArtifactManager = artifacts.GCSArtifactManager
26 | LocalArtifactManager = artifacts.LocalArtifactManager
27 | ServiceUnavailable = artifacts.ServiceUnavailable
28 | ArtifactsMissing = artifacts.ArtifactsMissing
29 | get_artifact_manager = artifacts.get_artifact_manager
30 | list_ids = artifacts.list_ids
31 | upload_backup_artifacts = artifacts.upload_backup_artifacts
32 | store_artifacts_with_backup = artifacts.store_artifacts_with_backup
33 |
34 | DEFAULT_GCS_TIMEOUT = 60
35 |
36 |
37 | if __name__ == "__main__":
38 | import argparse
39 |
40 | parser = argparse.ArgumentParser()
41 | subparsers = parser.add_subparsers(dest="command")
42 | list_parser = subparsers.add_parser("list")
43 | list_parser.add_argument("location", type=str)
44 | args = parser.parse_args()
45 | if args.command == "list":
46 | manager = get_artifact_manager(args.location)
47 | asyncio.run(list_ids(manager))
48 |
--------------------------------------------------------------------------------
/py/janitor/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2018 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | __all__ = [
19 | "Config",
20 | "Campaign",
21 | "AptRepository",
22 | "read_config",
23 | "read_string",
24 | "get_campaign_config",
25 | "get_distribution",
26 | ]
27 |
28 |
29 | from ._common import config as _config_rs # type: ignore
30 |
31 | Config = _config_rs.Config
32 | Campaign = _config_rs.Campaign
33 | AptRepository = _config_rs.AptRepository
34 | read_config = _config_rs.read_config
35 | read_string = _config_rs.read_string
36 | get_distribution = _config_rs.get_distribution
37 | get_campaign_config = _config_rs.get_campaign_config
38 |
39 |
40 | if __name__ == "__main__":
41 | import argparse
42 |
43 | parser = argparse.ArgumentParser()
44 | parser.add_argument("config_file", type=str, help="Configuration file to read")
45 | args = parser.parse_args()
46 | with open(args.config_file) as f:
47 | config = read_config(f)
48 |
--------------------------------------------------------------------------------
/py/janitor/debian/debdiff.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2019 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | from .._common import debdiff # type: ignore
19 |
20 | debdiff_is_empty = debdiff.debdiff_is_empty # type: ignore
21 | filter_boring = debdiff.filter_boring # type: ignore
22 | section_is_wdiff = debdiff.section_is_wdiff # type: ignore
23 | markdownify_debdiff = debdiff.markdownify_debdiff # type: ignore
24 | htmlize_debdiff = debdiff.htmlize_debdiff # type: ignore
25 | DebdiffError = debdiff.DebdiffError # type: ignore
26 | run_debdiff = debdiff.run_debdiff # type: ignore
27 |
--------------------------------------------------------------------------------
/py/janitor/debian/debian.sql:
--------------------------------------------------------------------------------
1 | CREATE EXTENSION IF NOT EXISTS debversion;
2 | CREATE TABLE debian_build (
3 | run_id text not null references run (id),
4 | -- Debian version text of the built package
5 | version debversion not null,
6 | -- Distribution the package was built for (e.g. "lintian-fixes")
7 | distribution text not null,
8 | source text not null,
9 | binary_packages text[],
10 | lintian_result json
11 | );
12 | CREATE INDEX ON debian_build (run_id);
13 | CREATE INDEX ON debian_build (distribution, source, version);
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/py/janitor/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jelmer/janitor/358562478447842c256beddffd1227722b6d7ad1/py/janitor/py.typed
--------------------------------------------------------------------------------
/py/janitor/review.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2018 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | from typing import Optional
19 |
20 | from yarl import URL
21 |
22 | from .schedule import do_schedule
23 |
24 |
25 | async def store_review(
26 | conn,
27 | session,
28 | runner_url,
29 | run_id: str,
30 | verdict: str,
31 | comment: Optional[str],
32 | reviewer: Optional[str],
33 | is_qa_reviewer: bool,
34 | ):
35 | async with conn.transaction():
36 | if verdict == "reschedule":
37 | verdict = "rejected"
38 |
39 | run = await conn.fetchrow(
40 | "SELECT suite, codebase FROM run WHERE id = $1", run_id
41 | )
42 | await do_schedule(
43 | conn,
44 | campaign=run["suite"],
45 | refresh=True,
46 | requester=f"reviewer ({reviewer})",
47 | bucket="default",
48 | codebase=run["codebase"],
49 | )
50 |
51 | if verdict != "abstained" and is_qa_reviewer:
52 | async with session.post(
53 | URL(runner_url) / "runs" / run_id,
54 | json={"publish_status": verdict},
55 | raise_for_status=True,
56 | ):
57 | pass
58 | await conn.execute(
59 | "INSERT INTO review (run_id, comment, reviewer, verdict) VALUES "
60 | " ($1, $2, $3, $4) ON CONFLICT (run_id, reviewer) "
61 | "DO UPDATE SET verdict = EXCLUDED.verdict, comment = EXCLUDED.comment, "
62 | "reviewed_at = NOW()",
63 | run_id,
64 | comment,
65 | reviewer,
66 | verdict,
67 | )
68 |
--------------------------------------------------------------------------------
/py/janitor/site/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jelmer/janitor/358562478447842c256beddffd1227722b6d7ad1/py/janitor/site/_static/file.png
--------------------------------------------------------------------------------
/py/janitor/site/_static/janitor.css:
--------------------------------------------------------------------------------
1 | /* -- result codes */
2 | span.result-code-bug, span.publish-bug {
3 | color: darkred;
4 | }
5 |
6 | span.result-code-failure, span.publish-failure {
7 | color: red;
8 | }
9 |
10 | span.result-code-transient-failure, span.publish-transient {
11 | color: orange;
12 | }
13 |
14 | span.result-code-success, span.publish-success {
15 | color: green;
16 | }
17 |
18 | span.result-code-nothing-new-to-do, span.publish-nothing-to-do {
19 | color: lightgreen;
20 | }
21 |
22 | span.result-code-nothing-to-do, span.publish-missing {
23 | color: grey;
24 | }
25 |
26 | blockquote.result-code-explanation {
27 | background-color: #FE9;
28 | }
29 |
30 | tr.package-error {
31 | background-color: coral;
32 | }
33 |
34 | tr.package-unabsorbed {
35 | background-color: azure;
36 | }
37 |
38 | tr.package-candidates {
39 | background-color: beige;
40 | }
41 |
42 | tr.package-nothing-to-do, tr.package-nothing-new-to-do {
43 | background-color: grey;
44 | opacity: 0.5;
45 | }
46 |
47 | tr.package-proposal {
48 | background-color: lightgreen;
49 | }
50 |
51 | li.not-in-archive {
52 | font-weight: bolder;
53 | }
54 |
55 | ul.metadata {
56 | list-style: none;
57 | background-color: lightyellow;
58 | }
59 |
60 | tr.row-disabled, td.old-version {
61 | color: lightgrey;
62 | }
63 |
64 | span.tinycode {
65 | font-family: monospace;
66 | font-weight: bold;
67 | font-size: 1.1em;
68 | padding: 1px 3px;
69 | }
70 |
71 | span.tinyhint {
72 | font-weight: 700;
73 | font-size: 0.9em;
74 | }
75 |
76 | span.tinycontext {
77 | font-style: italic;
78 | font-size: 0.9em;
79 | }
80 |
81 | span.tinycomment {
82 | font-size: 0.9em;
83 | }
84 |
85 | td.version {
86 | text-align: center;
87 | }
88 |
89 | /* Queue active lines */
90 | tr.active {
91 | background-color: bisque;
92 | }
93 |
--------------------------------------------------------------------------------
/py/janitor/site/_static/janitor.js:
--------------------------------------------------------------------------------
1 | var handlers = [];
2 |
3 | registerHandler = function(kind, cb) {
4 | handlers.push({'kind': kind, 'callback': cb});
5 | };
6 |
7 | var ws_url, connection;
8 | window.onload = function() {
9 | if(location.protocol == 'http:') {
10 | ws_url = 'ws://' + location.hostname + '/ws/notifications';
11 | } else if(location.protocol == 'https:') {
12 | ws_url = 'wss://' + location.hostname + '/ws/notifications';
13 | } else {
14 | console.log('Unknown protocol: ' + location.protocol);
15 | ws_url = undefined;
16 | }
17 |
18 | connection = new WebSocket(ws_url);
19 |
20 | connection.onerror = (error) => {
21 | console.log('WebSocket error: ');
22 | console.log(error);
23 | }
24 |
25 | connection.onmessage = (e) => {
26 | var data = JSON.parse(e.data);
27 | handlers.forEach(function(handler) {
28 | if (handler.kind == data[0]) { handler.callback(data[1]); }
29 | });
30 | console.log(data);
31 | }
32 | }
33 |
34 | window.onbeforeunload = function(){
35 | if (connection) { connection.close(); }
36 | };
37 |
38 | // Please keep this logic in sync with janitor/site/__init__.py:format_duration
39 | format_duration = function(n) {
40 | var d = moment.duration(n, "s");
41 | var ret = "";
42 | if (d.weeks() > 0) {
43 | return d.weeks() + "w" + (d.days() % 7) + "d";
44 | }
45 | if (d.days() > 0) {
46 | return d.days() + "d" + (d.hours() % 24) + "h";
47 | }
48 | if (d.hours() > 0) {
49 | return d.hours() + "h" + (d.minutes() % 60) + "m";
50 | }
51 | if (d.minutes() > 0) {
52 | return d.minutes() + "m" + (d.seconds() % 60) + "s";
53 | }
54 | return d.seconds() + "s";
55 | };
56 |
57 |
58 | window.chartColors = {
59 | red: 'rgb(255, 99, 132)',
60 | orange: 'rgb(255, 159, 64)',
61 | yellow: 'rgb(255, 205, 86)',
62 | green: 'rgb(75, 192, 192)',
63 | blue: 'rgb(54, 162, 235)',
64 | purple: 'rgb(153, 102, 255)',
65 | grey: 'rgb(201, 203, 207)'
66 | };
67 |
--------------------------------------------------------------------------------
/py/janitor/site/_static/lintian.css:
--------------------------------------------------------------------------------
1 | ul.lintian-filelist {
2 | list-style-type: disclosure-closed;
3 | }
4 |
5 | ul.lintian-hintlist {
6 | list-style: none;
7 | padding-left: 5px;
8 | }
9 |
10 | ul.lintian-commentlist {
11 | list-style: none;
12 | padding-left: 0px;
13 | }
14 |
15 | li.lintian-fileitem {
16 | margin: 5px 0 5px 0;
17 | }
18 |
19 | #lintian-code-informative {
20 | color: #111;
21 | background-color: #C7EA3C;
22 | }
23 |
24 | #lintian-code-warning {
25 | color: #111;
26 | background-color: #FFEB44;
27 | }
28 |
29 | #lintian-code-error {
30 | color: #111;
31 | background-color: #FF6700;
32 | }
33 |
34 | #lintian-code-X {
35 | color: #111;
36 | background-color: #EE99EE;
37 | }
38 |
39 | #lintian-code-O {
40 | color: #111;
41 | background-color: #DDD;
42 | }
43 |
44 | #lintian-code-pedantic {
45 | color: #111;
46 | background-color: #C7EA3C;
47 | }
48 |
49 | #lintian-code-classification {
50 | color: #111;
51 | background-color: lightblue;
52 | }
53 |
54 | li.lintian-code-O {
55 | color: #444;
56 | }
57 |
--------------------------------------------------------------------------------
/py/janitor/site/cupboard/merge_proposals.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | from typing import Optional
4 |
5 | import asyncpg
6 |
7 |
8 | async def get_proposals_with_run(conn: asyncpg.Connection, campaign: Optional[str]):
9 | query = """
10 | SELECT
11 | DISTINCT ON (merge_proposal.url)
12 | run.codebase AS codebase,
13 | run.suite AS suite,
14 | merge_proposal.url AS url,
15 | merge_proposal.status AS status
16 | FROM
17 | merge_proposal
18 | LEFT JOIN new_result_branch ON new_result_branch.revision = merge_proposal.revision
19 | LEFT JOIN run ON run.id = new_result_branch.run_id
20 | """
21 | args = []
22 | if campaign:
23 | args.append(campaign)
24 | query += """
25 | WHERE suite = $1
26 | """
27 | query += """
28 | ORDER BY merge_proposal.url, run.finish_time DESC
29 | """
30 | return await conn.fetch(query, *args)
31 |
32 |
33 | async def write_merge_proposals(db, suite):
34 | async with db.acquire() as conn:
35 | proposals_by_status: dict[str, list[asyncpg.Record]] = {}
36 | for row in await get_proposals_with_run(conn, campaign=suite):
37 | proposals_by_status.setdefault(row["status"], []).append(row)
38 |
39 | merged = proposals_by_status.get("merged", []) + proposals_by_status.get(
40 | "applied", []
41 | )
42 | return {
43 | "suite": suite,
44 | "open_proposals": proposals_by_status.get("open", []),
45 | "merged_proposals": merged,
46 | "closed_proposals": proposals_by_status.get("closed", []),
47 | "rejected_proposals": proposals_by_status.get("rejected", []),
48 | "abandoned_proposals": proposals_by_status.get("abandoned", []),
49 | }
50 |
51 |
52 | async def get_proposal_with_run(conn: asyncpg.Connection, url: str):
53 | query = """
54 | SELECT
55 | run.codebase AS codebase,
56 | run.suite AS suite,
57 | merge_proposal.url AS url,
58 | merge_proposal.status AS status,
59 | merge_proposal.merged_at AS merged_at,
60 | merge_proposal.merged_by AS merged_by,
61 | merge_proposal.last_scanned AS last_scanned,
62 | merge_proposal.can_be_merged AS can_be_merged
63 | FROM
64 | merge_proposal
65 | LEFT JOIN new_result_branch ON new_result_branch.revision = merge_proposal.revision
66 | LEFT JOIN run ON run.id = new_result_branch.run_id
67 | WHERE url = $1
68 | """
69 | return await conn.fetchrow(query, url)
70 |
71 |
72 | async def get_publishes(conn, url):
73 | return await conn.fetch(
74 | """
75 | SELECT * FROM publish WHERE merge_proposal_url = $1
76 | ORDER BY timestamp DESC
77 | """,
78 | url,
79 | )
80 |
81 |
82 | async def write_merge_proposal(db, url):
83 | async with db.acquire() as conn:
84 | proposal = await get_proposal_with_run(conn, url)
85 |
86 | publishes = await get_publishes(conn, url)
87 |
88 | return {
89 | "proposal": proposal,
90 | "publishes": publishes,
91 | }
92 |
--------------------------------------------------------------------------------
/py/janitor/site/cupboard/publish.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from aiohttp import web
4 |
5 |
6 | async def iter_publish_history(conn, limit: Optional[int] = None):
7 | query = """
8 | SELECT
9 | publish.id, publish.timestamp, publish.branch_name,
10 | publish.mode, publish.merge_proposal_url, publish.result_code,
11 | publish.description, codebase.web_url, publish.codebase AS codebase
12 | FROM
13 | publish
14 | LEFT JOIN codebase ON codebase.name = publish.codebase
15 | ORDER BY timestamp DESC
16 | """
17 | if limit:
18 | query += f" LIMIT {limit}"
19 | return await conn.fetch(query)
20 |
21 |
22 | async def write_history(conn, limit: Optional[int] = None):
23 | return {
24 | "count": limit,
25 | "history": await iter_publish_history(conn, limit=limit),
26 | }
27 |
28 |
29 | async def write_publish(conn, publish_id):
30 | query = """
31 | SELECT
32 | publish.id AS id,
33 | publish.timestamp AS timestamp,
34 | publish.branch_name AS branch_name,
35 | publish.mode AS mode,
36 | publish.merge_proposal_url AS merge_proposal_url,
37 | publish.result_code AS result_code,
38 | publish.description AS description,
39 | codebase.web_url AS vcs_browse,
40 | codebase.name AS codebase
41 | FROM
42 | publish
43 | LEFT JOIN codebase ON codebase.name = publish.codebase
44 | WHERE id = $1
45 | """
46 | publish = await conn.fetchrow(query, publish_id)
47 | if publish is None:
48 | raise web.HTTPNotFound(text=f"no such publish: {publish_id}")
49 | return {"publish": publish}
50 |
--------------------------------------------------------------------------------
/py/janitor/site/merge_proposals.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | from typing import Optional
4 |
5 | import asyncpg
6 |
7 |
8 | async def get_proposals_with_run(conn: asyncpg.Connection, suite: Optional[str]):
9 | query = """
10 | SELECT
11 | DISTINCT ON (merge_proposal.url)
12 | run.codebase AS codebase,
13 | run.suite AS suite,
14 | merge_proposal.url AS url,
15 | merge_proposal.status AS status
16 | FROM
17 | merge_proposal
18 | LEFT JOIN new_result_branch ON new_result_branch.revision = merge_proposal.revision
19 | LEFT JOIN run ON run.id = new_result_branch.run_id
20 | """
21 | args = []
22 | if suite:
23 | args.append(suite)
24 | query += """
25 | WHERE suite = $1
26 | """
27 | query += """
28 | ORDER BY merge_proposal.url, run.finish_time DESC
29 | """
30 | return await conn.fetch(query, *args)
31 |
32 |
33 | async def write_merge_proposals(db, suite):
34 | async with db.acquire() as conn:
35 | proposals_by_status: dict[str, list[asyncpg.Record]] = {}
36 | for row in await get_proposals_with_run(conn, suite=suite):
37 | proposals_by_status.setdefault(row["status"], []).append(row)
38 |
39 | merged = proposals_by_status.get("merged", []) + proposals_by_status.get(
40 | "applied", []
41 | )
42 | return {
43 | "suite": suite,
44 | "campaign": suite,
45 | "open_proposals": proposals_by_status.get("open", []),
46 | "merged_proposals": merged,
47 | "closed_proposals": proposals_by_status.get("closed", []),
48 | "rejected_proposals": proposals_by_status.get("rejected", []),
49 | "abandoned_proposals": proposals_by_status.get("abandoned", []),
50 | }
51 |
52 |
53 | async def get_proposal_with_run(conn: asyncpg.Connection, url: str):
54 | query = """
55 | SELECT
56 | run.codebase AS codebase,
57 | run.suite AS suite,
58 | merge_proposal.url AS url,
59 | merge_proposal.status AS status,
60 | merge_proposal.merged_at AS merged_at,
61 | merge_proposal.merged_by AS merged_by,
62 | merge_proposal.last_scanned AS last_scanned,
63 | merge_proposal.can_be_merged AS can_be_merged
64 | FROM
65 | merge_proposal
66 | LEFT JOIN new_result_branch ON new_result_branch.revision = merge_proposal.revision
67 | LEFT JOIN run ON run.id = new_result_branch.run_id
68 | WHERE url = $1
69 | """
70 | return await conn.fetchrow(query, url)
71 |
72 |
73 | async def get_publishes(conn, url):
74 | return await conn.fetch(
75 | """
76 | SELECT * FROM publish WHERE merge_proposal_url = $1
77 | ORDER BY timestamp ASC
78 | """,
79 | url,
80 | )
81 |
82 |
83 | async def write_merge_proposal(db, url):
84 | async with db.acquire() as conn:
85 | proposal = await get_proposal_with_run(conn, url)
86 |
87 | publishes = await get_publishes(conn, url)
88 |
89 | return {
90 | "proposal": proposal,
91 | "publishes": publishes,
92 | }
93 |
--------------------------------------------------------------------------------
/py/janitor/site/pubsub.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright (C) 2018 Jelmer Vernooij
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 2 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software
16 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | import asyncio
19 | import json
20 |
21 | from aiohttp import web
22 | from aiohttp_openmetrics import Gauge
23 |
24 | subscription_count = Gauge(
25 | "subscriptions", "Subscriptions per topic", labelnames=("topic",)
26 | )
27 |
28 |
29 | class Subscription:
30 | """A pubsub subscription."""
31 |
32 | def __init__(self, topic: "Topic") -> None:
33 | self.topic = topic
34 | self.queue: asyncio.Queue = asyncio.Queue()
35 | if topic.last:
36 | self.queue.put_nowait(topic.last)
37 |
38 | def __enter__(self):
39 | self.topic.subscriptions.add(self.queue)
40 | subscription_count.labels(self.topic.name).inc()
41 | return self.queue
42 |
43 | def __exit__(self, type, value, traceback):
44 | subscription_count.labels(self.topic.name).dec()
45 | self.topic.subscriptions.remove(self.queue)
46 |
47 |
48 | class Topic:
49 | """A pubsub topic."""
50 |
51 | def __init__(self, name, repeat_last: bool = False) -> None:
52 | self.name = name
53 | self.subscriptions: set[asyncio.Queue] = set()
54 | self.last = None
55 | self.repeat_last = repeat_last
56 |
57 | def publish(self, message):
58 | if self.repeat_last:
59 | self.last = message
60 | for queue in self.subscriptions:
61 | queue.put_nowait(message)
62 |
63 |
64 | async def pubsub_handler(topic: Topic, request) -> web.WebSocketResponse:
65 | ws = web.WebSocketResponse()
66 | await ws.prepare(request)
67 |
68 | with Subscription(topic) as queue:
69 | while True:
70 | msg = await queue.get()
71 | try:
72 | await ws.send_str(json.dumps(msg))
73 | except TypeError as e:
74 | raise TypeError(f"not jsonable: {msg!r}") from e
75 | except ConnectionResetError:
76 | break
77 |
78 | return ws
79 |
--------------------------------------------------------------------------------
/py/janitor/site/templates/about.html:
--------------------------------------------------------------------------------
1 | {% extends "layout.html" %}
2 | {% block page_title %}
3 | About
4 | {% endblock page_title %}
5 | {% block body %}
6 |
7 |
About
8 |
9 | The initial version of the Janitor was written by Jelmer Vernooij.
10 |
11 |
12 | The code for the Debian-specific instance has now been split out into a separate
13 | repository.
14 |
15 |
Many thanks to everybody who has provided feedback and helped make the Janitor better.
16 |
Source Code
17 |
The Janitor itself is mostly written in Python, using the following libraries and services:
18 |
19 |
20 | The style for the website is based on the Alabaster 0.7.8 theme for Sphinx.
21 |
22 |
23 | Breezy
24 | provides abstractions over the version control system (Git, Bazaar,
25 | Mercurial, Subversion) and the supported hosting platforms (GitHub,
26 | GitLab, Launchpad).
27 |
28 |
29 | Silver-Platter
31 | ties this all together; it manages branches, invokes codemods and pushes back or creates
32 | merge proposals.
33 |
33 | To install the packages from this changeset, use the following
34 | sources configuration (with archive keyring stored in
35 | /etc/apt/keyrings/debian-janitor.gpg):
36 |
37 |
38 |
39 |
40 |
41 | deb "[arch=amd64 signed-by=/etc/apt/keyrings/debian-janitor.gpg]" {{ url.join(URL('/')) }} cs/{{ changeset.id }} main
42 | deb-src "[arch=amd64 signed-by=/etc/apt/keyrings/debian-janitor.gpg]" {{ url.join(URL('/')) }} cs/{{ changeset.id }} main
43 |
Simply give the bot commit access to your repository, and it will push fixes rather than proposing them.
7 |
8 | The bot will need permission to push to the relevant branches. For
9 | repositories on GitLab instances (such as salsa) this means that
10 | it will need developer permissions if the relevant branch is unprotected,
11 | and maintainer permissions if the relevant branch is protected.
12 | See the GitLab permissions guide for details.
13 |
6 | The Janitor will automatically reschedule processing of packages with a
7 | conflicted merge proposal. Once a conflict appears, it may take a couple of
8 | hours before the merge proposal is updated.
9 |
10 |
11 | It will also regularly rebase merge proposals on the packaging branch. It
12 | can take several days before this happens, since there is no mechanism to
13 | notify the Janitor of new commits. You can manually trigger a rerun
14 | from the package-specific page linked from the merge proposal.
15 |
18 | Work is under way to also support Mercurial. Subversion support may
19 | also be an option, though I have yet to work out what the equivalent of
20 | pull requests in Subversion would be.
21 |
2 | Attempting to access the packaging branch resulted in an HTTP
3 | 401 Unauthorized error. This can indicate that the repository is
4 | private, that accessing it may require logging in, or that it simply does not
5 | exist.
6 |
7 |
8 | This can happen due to Breezy not sending credentials to GitLab
9 | repositories. See
10 | this bug
11 | for details.
12 |
The package needs a Go package to be installed in order to build.
2 |
3 | However, the Go package is not available in the APT repository for
4 | the distribution and thus cannot be added to the build dependencies
5 | for the package that's being built.
6 |
The package contains a path that isn't unicode normalized.
2 |
3 | Breezy currently prohibits this, because it results in issues checking out
4 | files on Mac OS X. See this bug
5 | about loosening the constraints.
6 |
2 | The upstream version that was picked contains characters that
3 | are invalid in Debian version strings.
4 |
5 |
6 | For version strings that come from upstream tags, this can
7 | be because the upstream tags use characters that are not valid
8 | in Debian version strings. The Janitor currently only applies
9 | very basic version mangling to upstream tags:
10 |
11 |
12 | Strip release- prefixes and -release suffixes
13 |
14 |
15 | Strip package- prefixes
16 |
17 |
18 | Strip v prefixes
19 |
20 |
21 | Replace any underscores with dots if there are no other
22 | dots in the version. This is done for compatibility with CVS style tags,
23 | which usually did not use dots.
24 |
25 |
26 |
27 | For version strings that come from uscan, no additional mangling
28 | is performed besides the mangling that uscan already does.
29 |
30 |
31 | In some cases, the version string matching is overly broad - and the
32 | lintian-brush could possibly replace the first group with @ANY_VERSION@ to
33 | fix the watch file.
34 |
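A minimal sketch of the basic mangling rules listed above (for illustration only; this is not the Janitor's actual implementation):

```python
import re


def mangle_upstream_tag(tag: str) -> str:
    # Illustrative only: applies the basic rules described above.
    version = tag
    # Strip release- prefixes and -release suffixes.
    version = re.sub(r"^release-", "", version)
    version = re.sub(r"-release$", "", version)
    # Strip package- prefixes.
    version = re.sub(r"^package-", "", version)
    # Strip v prefixes.
    version = re.sub(r"^v", "", version)
    # Replace underscores with dots if there are no other dots in the
    # version (for compatibility with CVS-style tags).
    if "." not in version:
        version = version.replace("_", ".")
    return version
```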
2 | The management plane for the worker no longer knows about the
3 | job that was processing the run. This can happen because e.g.
4 | the management plane for the worker was restarted, or because the worker
5 | failed to upload its results to the runner, possibly because the
6 | runner was down or rejected the uploads - it could be crashing
7 | while processing them.
8 |
2 | The build of the package timed out. This often means that it was stuck in an
3 | endless loop or did not generate output on standard output for a long time
4 | (usually an hour).
5 |
2 | There are changes between the upstream tarball and the non-debian/ part of
3 | the packaging repository that are not accounted for by any of the patches
4 | under debian/patches.
5 |
2 | The packaging branch is using a version control system that is not supported
3 | by the Janitor. At the moment, only Git and
4 | Bazaar repositories are supported.
5 |
The upstream branch could not be found; it may have been moved.
2 |
3 | The upstream branch will be taken from the Repository field in
4 | debian/upstream/metadata or guessed based on metadata in the source
5 | package.
6 |
7 |
8 | To fix this error, set the Repository field appropriately in
9 | debian/upstream/metadata.
10 |
2 | The location of the upstream repository of this package is unknown. The canonical location
3 | for this information is the Repository field in the debian/upstream/metadata file.
4 |
5 |
6 | The Janitor will also fall back to attempting to read various other metadata files
7 | to figure out the upstream repository location, such as dist.ini (for perl packages)
8 | or setup.py (for Python packages).
9 |
The worker failed to respond to pings, and the run was therefore marked as lost.
2 |
3 | In some cases, this can happen because the runner is rejecting the uploads
4 | from the worker - e.g. because it consistently crashes while processing them.
5 |