├── .github └── workflows │ ├── test.yml │ └── update-docs.yml ├── .gitignore ├── .readthedocs.yaml ├── .typos.toml ├── LICENSE ├── README.md ├── config.toml.sample ├── docs ├── Makefile ├── alarm.rst ├── api.rst ├── cleanup.rst ├── conf.py ├── index.rst ├── make.bat ├── requirements.txt └── setup.rst ├── lilac ├── lilac2 ├── __init__.py ├── aliases.yaml ├── api.py ├── building.py ├── cmd.py ├── const.py ├── db.py ├── intl.py ├── l10n │ ├── en │ │ ├── mail.ftl │ │ └── main.ftl │ └── zh_CN │ │ ├── mail.ftl │ │ └── main.ftl ├── lilacpy.py ├── lilacyaml.py ├── mail.py ├── mediawiki2pkgbuild.py ├── nomypy.py ├── nvchecker.py ├── packages.py ├── pkgbuild.py ├── py.typed ├── pypi2pkgbuild.py ├── repo.py ├── slogconf.py ├── systemd.py ├── tools.py ├── typing.py ├── vendor │ ├── __init__.py │ ├── archpkg.py │ ├── github.py │ ├── htmlutils.py │ ├── mailutils.py │ ├── myutils.py │ ├── nicelogger.py │ ├── requestsutils.py │ └── serializer.py └── worker.py ├── lilaclib.py ├── mypy.ini ├── nvchecker_source ├── README.rst ├── archfiles.py ├── rpkgs.py ├── vcs.py └── vcs.sh ├── pyproject.toml ├── pytest.ini ├── recv_gpg_keys ├── schema-docs ├── Makefile ├── lilac-py-fields.md ├── lilac-yaml-schema.yaml ├── special-files.md └── yaml2json ├── scripts ├── at-maintainer ├── build-cleaner ├── cleanup-dblck ├── cleanup-dblck.service ├── dbsetup.sql ├── lilac-cleaner ├── pre-commit ├── sendmail-test ├── sign_and_copy ├── tailf-build-log ├── update-archpkg-to-alpm ├── useful.sql └── yaourt-G ├── setup.py └── tests ├── conftest.py ├── fixtures ├── mxnet-git-b628fc716d23ae88373c6bd1089409297ccb2a38.diff ├── mxnet-git-c80336319e1a3e60178d815a48690e90d2a0c889.diff ├── mxnet-git-c88817c10e95f9d9afd7928b973504c4085b4b6c.diff ├── nodejs-web-ext-e4d4a1c33026d221ebf6570cc0a33c99dc4b1d9d.diff └── python-onnxruntime-7447a82a3fac720bbb85ba5cea5d99f7d6920690.diff ├── test_api.py ├── test_dependency_resolution.py ├── test_lilaclib.py └── test_rpkgs.py /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: run tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | tests: 7 | runs-on: ubuntu-latest 8 | 9 | # Use the base-devel image of Arch Linux for building pyalpm 10 | container: archlinux:base-devel 11 | 12 | steps: 13 | - name: Checkout code 14 | uses: actions/checkout@v4 15 | 16 | - name: Enable archlinuxcn repo 17 | run: | 18 | echo -e '[archlinuxcn]\nServer = https://repo.archlinuxcn.org/$arch' >> /etc/pacman.conf 19 | pacman-key --init 20 | pacman-key --populate 21 | pacman -Sy --noconfirm archlinuxcn-keyring 22 | 23 | - name: Install Python and deps 24 | run: pacman -Su --noconfirm python python-pytest python-pytest-asyncio nvchecker python-requests python-lxml python-yaml pyalpm python-structlog python-prctl python-fluent.runtime 25 | 26 | - name: Run pytest 27 | run: pytest 28 | -------------------------------------------------------------------------------- /.github/workflows/update-docs.yml: -------------------------------------------------------------------------------- 1 | name: Update lilac.yaml docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - schema-docs/lilac-yaml-schema.yaml 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v2 19 | with: 20 | fetch-depth: 0 21 | - name: Set up Python 22 | uses: actions/setup-python@v2 23 | with: 24 | python-version: '3.8' 25 | - name: Install Python dependencies 26 | run: pip install PyYAML 27 | 
- name: Set up NodeJS 28 | uses: actions/setup-node@v1 29 | with: 30 | node-version: 14.x 31 | - name: Install NodeJS dependencies 32 | run: npm install -g bootprint bootprint-json-schema 33 | 34 | - name: Configure git 35 | run: | 36 | git config --global user.name 'github-actions' 37 | git config --global user.email 'github-actions@github.com' 38 | - name: Update docs 39 | run: | 40 | cd schema-docs 41 | make 42 | - name: Commit changes 43 | run: | 44 | cd schema-docs/build/docs 45 | git push 46 | 47 | - uses: actions/checkout@v2 48 | with: 49 | repository: archlinuxcn/repo 50 | path: repo 51 | ssh-key: ${{ secrets.REPO_DEPLOY_KEY }} 52 | 53 | - name: Update archlinuxcn/repo 54 | run: | 55 | cp schema-docs/lilac-yaml-schema.yaml repo 56 | cd repo 57 | if [[ -n "$(git status -s)" ]]; then 58 | git add lilac-yaml-schema.yaml 59 | git commit -m 'update lilac-yaml-schema.yaml' 60 | git push 61 | fi 62 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | pylib/ 2 | __pycache__/ 3 | *.sw* 4 | .pytest_cache/ 5 | config.toml 6 | 7 | # Distribution / packaging 8 | build/ 9 | dist/ 10 | .eggs/ 11 | sdist/ 12 | *.egg-info/ 13 | _build/ 14 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: "3.10" 7 | 8 | sphinx: 9 | configuration: docs/conf.py 10 | 11 | python: 12 | install: 13 | - requirements: docs/requirements.txt 14 | -------------------------------------------------------------------------------- /.typos.toml: -------------------------------------------------------------------------------- 1 | [default.extend-identifiers] 2 | update_ons = "update_ons" 3 | O_WRONLY = "O_WRONLY" 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![lilac.png](https://github.com/archlinuxcn/artworks/raw/master/lilac-logo/example%20banners/banner-small.png) 2 | 3 | [![Documentation Status](https://readthedocs.org/projects/lilac/badge/?version=latest)](https://lilac.readthedocs.io/en/latest/) 4 | 5 | What does lilac do? 6 | ---- 7 | 8 | lilac is an automatic packaging tool for Arch Linux. 
It basically does the following things: 9 | 10 | * figure out which packages have been updated or need rebuilding by calling [nvchecker](https://github.com/lilydjwg/nvchecker) 11 | * figure out the order to build packages 12 | * generate or update PKGBUILDs with custom configuration scripts ([common routines available](https://lilac.readthedocs.io/en/latest/api.html)) 13 | * call devtools to actually build packages 14 | * hand built packages over to [archrepo2](https://github.com/lilydjwg/archrepo2) to update the repository database 15 | * report any errors to maintainers via mail 16 | 17 | Docs 18 | ---- 19 | 20 | * [lilac.yaml](https://archlinuxcn.github.io/lilac/) 21 | * [lilac.py API](https://lilac.readthedocs.io/en/latest/api.html) 22 | * [nvchecker usage](https://nvchecker.readthedocs.io/en/latest/usage.html) (used in the `update_on` field of `lilac.yaml`) 23 | * [Setup and run your own](https://lilac.readthedocs.io/en/latest/) 24 | 25 | Update 26 | ---- 27 | 28 | ### 2024-06-28 29 | 30 | If the database is in use, run the following SQL to update it: 31 | 32 | ```sql 33 | 34 | alter table lilac.pkglog add column maintainers jsonb; 35 | ``` 36 | 37 | 38 | License 39 | ------- 40 | 41 | This project is licensed under GPLv3. 42 | -------------------------------------------------------------------------------- /config.toml.sample: -------------------------------------------------------------------------------- 1 | [envvars] 2 | TZ = "Asia/Shanghai" 3 | TERM = "xterm" 4 | # this doesn't help with Python itself; please set externally if desirable 5 | # LANG = "zh_CN.UTF-8" 6 | 7 | [repository] 8 | name = "archlinuxcn" 9 | # The email address where undirected nvchecker error reports should go. 10 | email = "repo@example.com" 11 | # this can point into a directory inside the git repo. 12 | repodir = "/path/to/gitrepo" 13 | # The path where built packages and signatures are copied to 14 | # comment out if there's no need to copy built packages 15 | destdir = "/path/to/pkgdir" 16 | 17 | [lilac] 18 | # this is the name in the mail header and subject 19 | name = "lilac" 20 | # where lilac sends mails from 21 | email = "lilac@example.com" 22 | # for internal error reports 23 | master = "Your Name <you@example.com>" 24 | # Set an unsubscribe_address to receive unsubscribe requests 25 | # unsubscribe_address = "unsubscribe@example.com" 26 | # Set to true to automatically rebuild packages which failed to build last time 27 | rebuild_failed_pkgs = true 28 | git_push = false 29 | # Set a prefix for commit messages 30 | # commit_msg_prefix = '' 31 | send_email = false 32 | # Optional: template for log file URL.
Used in package error emails 33 | logurl = "https://example.com/${pkgbase}/${datetime}.html" 34 | # for searching github; this is NOT for nvchecker, which should be configured via ~/.lilac/nvchecker_keyfile.toml 35 | # github_token = "xxx" 36 | 37 | # keep build logs; you need to manually run the script "scripts/dbsetup.sql" once 38 | # requires SQLAlchemy and a corresponding driver 39 | # dburl = "postgresql:///" 40 | # the schema to use; by default lilac uses the schema "lilac" 41 | # schema = "lilac" 42 | max_concurrency = 1 43 | 44 | [nvchecker] 45 | # set proxy for nvchecker 46 | # proxy = "http://localhost:8000" 47 | 48 | [smtp] 49 | # You can configure a SMTP account here; it defaults to localhost:53 50 | #host = "" 51 | #port = 0 52 | #use_ssl = false 53 | #username = "" 54 | #password = "" 55 | # Set to true to allow ANSI characters in content 56 | #use_ansi = false 57 | 58 | [bindmounts] 59 | # bind mounts in the devtools environment, e.g. for caching 60 | # source directories will be created if not yet 61 | "~/.cache/archbuild-bind-cache" = "/build/.cache" 62 | "~/.cache/archbuild-bind-cache/ghcup" = "/build/.ghcup" 63 | "~/.cache/pip" = "/build/.cache/pip" 64 | "~/.cargo" = "/build/.cargo" 65 | 66 | [misc] 67 | # run some commands before each run 68 | # prerun = [ 69 | # ["sudo", "rsync-packages-pool"], 70 | # ] 71 | 72 | # run some commands after each run 73 | postrun = [ 74 | # ["upload-packages"], 75 | ] 76 | 77 | # run some commands after each package built 78 | # env: PKGBASE, RESULT=successful, failed, skipped, staged, VERSION 79 | # postbuild = [ 80 | # [...] 81 | # ] 82 | 83 | # mount these paths as tmpfs in the chroot 84 | tmpfs = [ 85 | # bazel's cache causes failures frequently 86 | "/build/.cache/bazel" 87 | ] 88 | 89 | # pacman.conf to use for repository databases 90 | pacman_conf = "/etc/pacman.conf" 91 | 92 | # vim: se ft=toml: 93 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/alarm.rst: -------------------------------------------------------------------------------- 1 | Notes on Arch Linux ARM 2 | ======================= 3 | 4 | devtools 5 | -------- 6 | 7 | Install ``devtools-archlinuxcn``, ``devtools-cn-git`` and ``devtools-arm-git`` 8 | 9 | ``alarm/devtools-alarm`` is broken because ``MAKEFLAGS`` and ``PACKAGER`` aren't passed in. (It also doesn't ship ``extra-aarch64-build``) 10 | 11 | ``extra/devtools`` is broken because it cannot handle alarm mirror URL. 12 | 13 | Building for a different arch 14 | ----------------------------- 15 | 16 | ArchLinuxARM does not use the same file for `any` packages across different architectures. 
This means that a chroot build for a different architecture (e.g. building armv7h on aarch64) may not work due to package signature mismatches. 17 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | ``lilac2.api`` --- The ``lilac.py`` API 2 | =========================================== 3 | 4 | .. automodule:: lilac2.api 5 | :members: 6 | :undoc-members: 7 | :imported-members: 8 | :exclude-members: Path, Container, SimpleNamespace, quote 9 | 10 | .. py:data:: s 11 | :type: requests.Session 12 | 13 | A shared `requests.Session <https://requests.readthedocs.io/en/latest/api/#requests.Session>`_ object to send out HTTP requests. 14 | 15 | -------------------------------------------------------------------------------- /docs/cleanup.rst: -------------------------------------------------------------------------------- 1 | Routine Cleanups 2 | ================ 3 | 4 | tmpfiles.d 5 | ---------- 6 | 7 | This page records things that should be configured to clean things up. Adjust if you want. 8 | 9 | Pacman package cache: ``/etc/tmpfiles.d/pkgcache.conf``:: 10 | 11 | e /var/cache/pacman - - - 20d 12 | 13 | Lilac build logs and caches for builds: ``~/.config/user-tmpfiles.d``:: 14 | 15 | e %h/.lilac/log - - - 30d 16 | 17 | e %h/.cargo/registry/cache/* - - - 30d 18 | e %h/.cargo/registry/src/* - - - 30d 19 | e %h/.cargo/checkouts/* - - - 30d 20 | 21 | e %h/.cache/archbuild-bind-cache/* - - - 30d 22 | e %h/.cache/pip/* - - - 30d 23 | 24 | Start the timer: 25 | 26 | .. code-block:: sh 27 | 28 | systemctl --user enable --now systemd-tmpfiles-clean.timer 29 | 30 | Cron jobs or systemd.timer 31 | -------------------------- 32 | 33 | You need to run these scripts periodically: 34 | 35 | build-cleaner 36 | 37 | Clean up chroots used by devtools: these chroots are reusable, but they can pile up. 38 | 39 | This script comes with lilac and requires root privileges. 40 | 41 | lilac-cleaner 42 | 43 | Clean up files downloaded and extracted during packaging. They are in the git repository, side-by-side with PKGBUILDs and built packages. 44 | 45 | This script can be run without installing lilac, so you can also run it locally. 46 | 47 | This script comes with lilac. Note that a ``-f`` argument is needed to actually delete files. 48 | 49 | repocleaner 50 | 51 | Clean up old packages inside the package repository. 52 | 53 | This script is at `repocleaner `_ and should be edited before running. It should be run with root privileges on the server where the package repository is managed. 54 | 55 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | import os 8 | import sys 9 | 10 | # -- Path setup -------------------------------------------------------------- 11 | sys.path.insert(0, os.path.abspath("..")) 12 | 13 | # If extensions (or modules to document with autodoc) are in another directory, 14 | # add these directories to sys.path here. If the directory is relative to the 15 | # documentation root, use os.path.abspath to make it absolute, like shown here.
16 | # 17 | # import os 18 | # import sys 19 | # sys.path.insert(0, os.path.abspath('.')) 20 | 21 | 22 | # -- Project information ----------------------------------------------------- 23 | 24 | project = 'lilac' 25 | copyright = '2021, lilydjwg et al.' 26 | author = 'lilydjwg et al.' 27 | 28 | 29 | # -- General configuration --------------------------------------------------- 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [ 35 | "sphinx.ext.autodoc", 36 | "sphinx.ext.doctest", 37 | "sphinx.ext.intersphinx", 38 | "sphinx.ext.viewcode", 39 | ] 40 | 41 | # Add any paths that contain templates here, relative to this directory. 42 | templates_path = ['_templates'] 43 | 44 | # List of patterns, relative to source directory, that match files and 45 | # directories to ignore when looking for source files. 46 | # This pattern also affects html_static_path and html_extra_path. 47 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 48 | 49 | 50 | # -- Options for HTML output ------------------------------------------------- 51 | 52 | # Add any paths that contain custom static files (such as style sheets) here, 53 | # relative to this directory. They are copied after the builtin static files, 54 | # so a file named "default.css" will overwrite the builtin "default.css". 55 | html_static_path = ['_static'] 56 | 57 | html_theme = "sphinx_rtd_theme" 58 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 59 | 60 | # On RTD we can't import sphinx_rtd_theme, but it will be applied by 61 | # default anyway. This block will use the same theme when building locally 62 | # as on RTD. 63 | if not on_rtd: 64 | import sphinx_rtd_theme 65 | 66 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 67 | 68 | html_theme_options = { 69 | 'collapse_navigation': False, 70 | } 71 | 72 | autodoc_mock_imports = ["pyalpm"] 73 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. lilac documentation master file, created by 2 | sphinx-quickstart on Sun May 30 17:00:18 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to lilac's documentation! 7 | ================================= 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | setup 13 | cleanup 14 | alarm 15 | api 16 | 17 | 18 | Indices and tables 19 | ================== 20 | 21 | * :ref:`genindex` 22 | * :ref:`modindex` 23 | * :ref:`search` 24 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | tomli-w 2 | requests 3 | lxml 4 | PyYAML 5 | structlog 6 | fluent.runtime 7 | 8 | sphinx>=3.2 9 | # <5 has strange bottom margins for p, and no list indicators 10 | sphinx-rtd-theme>=0.5 11 | -------------------------------------------------------------------------------- /docs/setup.rst: -------------------------------------------------------------------------------- 1 | Setup lilac 2 | =========== 3 | 4 | In this article we'll see how to set up and configure lilac. 5 | 6 | Installation 7 | ------------ 8 | 9 | It's recommended to run lilac on a full-fledged Arch Linux (or derived) system, not in a Docker container or on a different distribution. 10 | 11 | An easy way to install lilac and its dependencies is to install the ``lilac-git`` package from the `[archlinuxcn] repository `_ or AUR. 12 | 13 | As a workaround, instead of ``devtools``, ``devtools-archlinuxcn`` from ``[archlinuxcn]`` should be used until `this --keep-unit issue `_ is resolved. 14 | 15 | .. code-block:: sh 16 | 17 | pacman -Syu lilac-git devtools-archlinuxcn 18 | 19 | Lilac can store (and in the future, use) build logs in a database. Let's use PostgreSQL this time. Support for other databases may come in the future. 20 | 21 | To use PostgreSQL, the following dependencies need to be installed (besides the database itself): 22 | 23 | .. code-block:: sh 24 | 25 | pacman -S python-psycopg2 26 | 27 | Lilac can send error reports via email. A local mail transfer agent (MTA) is preferred (e.g. Postfix) but a remote one is supported too. We'll disable this in this article. 28 | 29 | User and Data 30 | ------------- 31 | 32 | Lilac needs a normal Linux user to run. You can create a dedicated user: 33 | 34 | .. code-block:: sh 35 | 36 | useradd -m -g pkg lilac 37 | 38 | The ``pkg`` group is created by the ``pid_children-git`` package, which lilac uses to clean up subprocesses. Users in this group have the power to kill subprocesses with root privileges. We'll configure this group to be able to build packages with devtools. 39 | 40 | Remember to fully terminate existing processes and re-login (or more easily, reboot) to get the group privileges applied. 41 | 42 | Lilac will use ``~/.lilac`` to store various data including build logs. 43 | 44 | Make sure that ``/etc/makepkg.conf`` and similar files don't change ``PKGDEST`` or the like, or lilac won't find the built packages. 45 | 46 | The ``PKGBUILD`` files need to be in a git repo. A subdirectory inside it is recommended. 47 | 48 | Set up a passphrase-less GPG key for the build user to sign packages: 49 | 50 | .. code-block:: sh 51 | 52 | gpg --gen-key 53 | 54 | Create a git repository for ``PKGBUILD``\ s and push it somewhere (e.g. GitHub).
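A minimal sketch of creating such a repository follows (the ``myorg`` remote is the same placeholder used by the clone command later in this article; substitute your own):

.. code-block:: sh

   git init myrepo-pkgbuilds && cd myrepo-pkgbuilds
   mkdir myrepo
   # git does not track the empty myrepo directory until packages are added
   touch README.md .gitignore
   git add -A && git commit -m 'initial layout'
   git remote add origin git@github.com:myorg/myrepo-pkgbuilds
   git push -u origin HEAD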
The directory structure is as follows:: 55 | 56 | worktree root 57 | ├── README.md 58 | ├── .gitignore 59 | └── myrepo 60 | ├── pkgbase1 61 | │   ├── lilac.yaml 62 | │   └── PKGBUILD 63 | ├── pkgbase2 64 | │   ├── lilac.yaml 65 | │   └── PKGBUILD 66 | └── pkgbase3 67 | ├── lilac.yaml 68 | ├── PKGBUILD 69 | └── pkgbase3.install 70 | 71 | It contains a directory of pkgbase-named directories which contain the PKGBUILDs, plus a couple of other files you may want. 72 | 73 | Generate a pair of ssh keys, and configure ssh / git so that you can push via ssh to the git repository above. 74 | 75 | .. code-block:: sh 76 | 77 | ssh-keygen -t ed25519 78 | 79 | Configure lilac 80 | --------------- 81 | 82 | It's time to configure lilac now. First, log in as the user that lilac will run as. Other than a fresh login, we can switch users with ``machinectl`` (don't use ``su`` or ``sudo`` to switch user, but you can use ``sudo machinectl`` if needed): 83 | 84 | .. code-block:: sh 85 | 86 | machinectl shell lilac@ 87 | 88 | Clone the git repository for ``PKGBUILD``\ s: 89 | 90 | .. code-block:: sh 91 | 92 | git clone git@github.com:myorg/myrepo-pkgbuilds 93 | 94 | Create a directory for built packages: 95 | 96 | .. code-block:: sh 97 | 98 | mkdir ~/packages 99 | 100 | Copy ``/usr/share/doc/lilac/config.toml.sample`` to ``~/.lilac/config.toml`` and edit it. We'll change the following options in this article. 101 | 102 | In the ``[envvars]`` section we set ``TZ`` to control the timezone lilac uses. It affects timestamps in various places including the log file. 103 | 104 | In the ``[repository]`` section: 105 | 106 | name 107 | the repository name: ``myrepo`` in this article. 108 | 109 | email 110 | an email address for undirected error reports (e.g. a list address that all maintainers will receive messages from): ``repo@localhost`` in this article. 111 | 112 | repodir 113 | path to the directory containing all the ``PKGBUILD`` directories: ``/home/lilac/myrepo-pkgbuilds/myrepo`` in this article. 114 | 115 | destdir 116 | where built packages go: ``/home/lilac/packages`` in this article. 117 | 118 | In the ``[lilac]`` section: 119 | 120 | name 121 | the bot's name. The error report mails will be sent from this name and have it in the subject. 122 | 123 | email 124 | the address where lilac sends mails from. This should be the same one lilac uses for git commits. 125 | 126 | master 127 | email address of the admin of this lilac instance. In case of any unhandled errors a report will be sent here. E.g. ``Admin <admin@example.org>``. 128 | 129 | rebuild_failed_pkgs 130 | Whether to rebuild failed packages. We assume a failed package won't recover by itself, so we set this to ``false`` to avoid needless rebuilds. 131 | 132 | git_push 133 | If this is set to ``true``, lilac pushes updated ``PKGBUILD``\ s to the remote git repository. We also need to generate an ssh key and configure it so that git pushes succeed. In this article we keep it ``false``. 134 | 135 | send_email 136 | We'll disable this and keep it ``false``. No error reports will be sent in this case. 137 | 138 | logurl 139 | We can make the build logs public via HTTP(S) with some web server, e.g. https://github.com/imlonghao/archlinuxcn-packages. This option configures the URL pointing to the log, and will appear in the error report.
140 | 141 | Three placeholders are available: 142 | 143 | - pkgbase 144 | - datetime: a ``%Y-%m-%dT%H:%M:%S`` format time when this batch of builds starts (corresponding to the directory name in ``~/.lilac/log``) 145 | - timestamp: UNIX timestamp (in seconds) when the error report is generated 146 | 147 | github_token 148 | A GitHub token to retrieve maintainers' public email addresses from their logins, so they don't need to configure an email address in ``lilac.yaml``. 149 | 150 | dburl 151 | The database URL in SQLAlchemy's format. For local PostgreSQL we use ``postgresql:///``. 152 | 153 | max_concurrency 154 | limit the number of builds running at the same time. 155 | 156 | If you track GitHub or GitLab, get your API tokens and put your keyfile at ``~/.lilac/nvchecker_keyfile.toml`` (see `nvchecker's documentation <https://nvchecker.readthedocs.io/en/latest/usage.html>`_ for details). 157 | 158 | Configure other parts 159 | --------------------- 160 | 161 | Set up the database server if you don't already have one (run as root): 162 | 163 | .. code-block:: sh 164 | 165 | pacman -S postgresql 166 | su - postgres -c "initdb --locale en_US.UTF-8 -D '/var/lib/postgres/data'" 167 | systemctl enable --now postgresql 168 | 169 | Create the database user and database if needed: 170 | 171 | .. code-block:: sh 172 | 173 | su - postgres -c 'createuser lilac' 174 | su - postgres -c 'createdb -O lilac lilac' 175 | 176 | You should be able to log in to the database server now. 177 | 178 | Set up the database tables (run as lilac): 179 | 180 | .. code-block:: sh 181 | 182 | psql ANY_ARGS_YOU_MAY_NEED < /usr/share/doc/lilac/dbsetup.sql 183 | 184 | Edit ``/etc/sudoers`` like:: 185 | 186 | Defaults env_keep += "PACKAGER MAKEFLAGS GNUPGHOME BUILDTOOL LOGDEST" 187 | 188 | %pkg ALL= NOPASSWD: /usr/bin/build-cleaner, /usr/bin/extra-x86_64-build, /usr/bin/multilib-build 189 | 190 | The first line allows setting some environment variables, and the second line configures packagers to run build commands without a password. You should add all devtools commands you'll need to run. ``build-cleaner`` is a script that lilac may run to clean up build chroots. 191 | 192 | Add something like this to ``/etc/profile.d/build.sh`` (at least update the domain name): 193 | 194 | .. code-block:: sh 195 | 196 | NPROC="$(nproc)" 197 | export MAKEFLAGS="-j$NPROC" 198 | unset NPROC 199 | 200 | if groups | grep -q "\<pkg\>"; then 201 | export PACKAGER="$USER <$USER@example.org>" 202 | fi 203 | 204 | To avoid using too much CPU, you can use cgroups v2 and put the following in ``/etc/systemd/system/user@.service.d/resources.conf`` to fairly share CPU among users (and between system and users). 205 | 206 | .. code-block:: ini 207 | 208 | [Service] 209 | CPUWeight=100 210 | 211 | To avoid the OOM Killer killing maintainers' processes unfairly, add ``OOMScoreAdjust=0`` to the above file, and set ``DefaultOOMScoreAdjust=0`` in ``/etc/systemd/user.conf``. 212 | 213 | If you have a lot of memory (e.g. >100G), you may want to mount ``/var/lib/archbuild`` as a tmpfs to speed up building. 214 | 215 | There is `an issue `_ that has recently been preventing ``git pull`` from succeeding. Please set the following for the user running ``lilac`` and ``repocleaner`` to avoid issues: 216 | 217 | .. code-block:: sh 218 | 219 | git config --global maintenance.autoDetach false 220 | 221 | Run 222 | --- 223 | 224 | Let's create our first lilac-managed package. 225 | 226 | In ``~/myrepo-pkgbuilds/myrepo`` create our package directory and ``PKGBUILD``: 227 | 228 | ..
code-block:: sh 229 | 230 | mkdir testpkg && cd testpkg 231 | vim PKGBUILD 232 | 233 | Create a minimal ``lilac.yaml`` file like this: 234 | 235 | .. code-block:: yaml 236 | 237 | maintainers: 238 | - github: lilydjwg 239 | 240 | update_on: 241 | - source: manual 242 | manual: 1 243 | 244 | Create a git commit and push it somewhere. 245 | 246 | Now it's time to run ``lilac``: 247 | 248 | .. code-block:: sh 249 | 250 | lilac 251 | 252 | Check ``~/.lilac/log`` for the logs. If everything goes well, you can change the ``config.toml`` to do git pushes, send email reports, etc. 253 | 254 | Set up a cron job or systemd.timer to run ``lilac`` periodically. Don't forget to make the user instance of systemd always run: 255 | 256 | .. code-block:: sh 257 | 258 | loginctl enable-linger 259 | 260 | lilac only produces packages and puts them in a directory, but doesn't update the pacman repository database. You may use `archrepo2 <https://github.com/lilydjwg/archrepo2>`_ to do that. 261 | 262 | Or you can upload packages to another server via the ``postrun`` config in ``~/.lilac/config.toml`` and run ``archrepo2`` and an HTTP server there. 263 | 264 | You can also set up an `HTTP service for build status and logs `_. 265 | 266 | Building leaves behind a lot of files that are no longer needed, so you'll need to set up `routine cleanup scripts <https://lilac.readthedocs.io/en/latest/cleanup.html>`_ after things are working. 267 | 268 | `archlinuxcn/misc_scripts <https://github.com/archlinuxcn/misc_scripts>`_ contains some auxiliary scripts for maintenance and GitHub issues. 269 | 270 | -------------------------------------------------------------------------------- /lilac2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/archlinuxcn/lilac/e7df53c8c7933a1457fce4f3cfd448b5fbdc4371/lilac2/__init__.py -------------------------------------------------------------------------------- /lilac2/aliases.yaml: -------------------------------------------------------------------------------- 1 | python: 2 | source: alpm 3 | alpm: python 4 | from_pattern: '^(\d+\.\d+)\..*' 5 | to_pattern: '\1' 6 | ruby: 7 | source: alpm 8 | alpm: ruby 9 | from_pattern: ^(\d+\.\d+)\..* 10 | to_pattern: \1 11 | perl: 12 | source: alpm 13 | alpm: perl 14 | from_pattern: ^(\d+\.\d+)\..* 15 | to_pattern: \1 16 | r: 17 | source: alpm 18 | alpm: r 19 | from_pattern: '^(\d+\.\d+)\..*' 20 | to_pattern: '\1' 21 | lua: 22 | source: alpm 23 | alpm: lua 24 | from_pattern: '^(\d+\.\d+)\..*' 25 | to_pattern: '\1' 26 | boost: 27 | source: alpm 28 | alpm: boost-libs 29 | from_pattern: ^(\d+\.\d+)\..* 30 | to_pattern: \1 31 | icu: 32 | source: alpm 33 | alpm: icu 34 | from_pattern: ^(\d+)\..* 35 | to_pattern: \1 36 | readline: 37 | source: alpm 38 | alpm: readline 39 | from_pattern: ^(\d+)\..* 40 | to_pattern: \1 41 | clang: 42 | source: alpm 43 | alpm: clang 44 | from_pattern: ^(\d+)\..* 45 | to_pattern: \1 46 | protobuf: 47 | source: alpm 48 | alpm: protobuf 49 | provided: libprotobuf.so 50 | strip_release: true 51 | jsoncpp: 52 | source: alpm 53 | alpm: jsoncpp 54 | provided: libjsoncpp.so 55 | strip_release: true 56 | mediawiki: 57 | source: alpm 58 | alpm: mediawiki 59 | from_pattern: '^(\d+\.\d+)\..*' 60 | to_pattern: '\1' 61 | grpc: 62 | source: alpm 63 | alpm: grpc 64 | provided: libgrpc++.so 65 | strip_release: true 66 | libssl: 67 | source: alpm 68 | alpm: openssl 69 | provided: libssl.so 70 | strip_release: true 71 | libcrypto: 72 | source: alpm 73 | alpm: openssl 74 | provided: libcrypto.so 75 | strip_release: true 76 | spdlog: 77 | source: alpm 78 | alpm: spdlog 79 | provided: libspdlog.so 80 | strip_release: true 81 |
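# The entries above are aliases: an update_on entry in lilac.yaml can say
# `alias: python` etc., and parse_update_on() in lilac2/lilacyaml.py merges
# the alias's keys into the entry as defaults (string values may reference
# {pacman_db_dir} and {repo_name}). A hypothetical alias tracking only
# glibc's major version would look like the sketch below (for illustration
# only; it is not part of this file):
#
# glibc-major:
#   source: alpm
#   alpm: glibc
#   from_pattern: '^(\d+)\..*'
#   to_pattern: '\1'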
fmt: 82 | source: alpm 83 | alpm: fmt 84 | provided: libfmt.so 85 | strip_release: true 86 | qt6-base: 87 | source: alpm 88 | alpm: qt6-base 89 | from_pattern: ^(\d+\.\d+)\..* 90 | to_pattern: \1 91 | openmpi: 92 | source: alpm 93 | alpm: openmpi 94 | provided: libmpi.so 95 | strip_release: true 96 | libgit2: 97 | source: alpm 98 | alpm: libgit2 99 | provided: libgit2.so 100 | strip_release: true 101 | -------------------------------------------------------------------------------- /lilac2/building.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | import logging 6 | import subprocess 7 | from typing import ( 8 | Optional, Iterable, List, Set, TYPE_CHECKING, 9 | ) 10 | import tempfile 11 | from pathlib import Path 12 | import time 13 | import json 14 | import threading 15 | import signal 16 | from contextlib import suppress 17 | 18 | from .typing import LilacInfo, Cmd, RUsage, PkgToBuild, OnBuildVers 19 | from .nvchecker import NvResults 20 | from .packages import Dependency, get_built_package_files 21 | from .tools import reap_zombies 22 | from .nomypy import BuildResult # type: ignore 23 | from . import systemd 24 | from . import intl 25 | 26 | if TYPE_CHECKING: 27 | from .repo import Repo 28 | assert Repo # type: ignore # make pyflakes happy 29 | del Repo 30 | 31 | logger = logging.getLogger(__name__) 32 | TLS = threading.local() 33 | 34 | class MissingDependencies(Exception): 35 | def __init__(self, pkgs: Set[str]) -> None: 36 | self.deps = pkgs 37 | 38 | class SkipBuild(Exception): 39 | def __init__(self, msg: str) -> None: 40 | self.msg = msg 41 | 42 | class BuildFailed(Exception): 43 | def __init__(self, msg: str) -> None: 44 | self.msg = msg 45 | 46 | def build_package( 47 | to_build: PkgToBuild, 48 | lilacinfo: LilacInfo, 49 | bindmounts: list[str], 50 | tmpfs: list[str], 51 | update_info: NvResults, 52 | commit_msg_template: str, 53 | depends: Iterable[Dependency], 54 | repo: Repo, 55 | myname: str, 56 | destdir: Path, 57 | logfile: Path, 58 | ) -> tuple[BuildResult, Optional[str]]: 59 | '''return BuildResult and version string if successful''' 60 | start_time = time.time() 61 | pkg_version = None 62 | rusage = None 63 | pkgbase = to_build.pkgbase 64 | try: 65 | maintainer = repo.find_maintainers(lilacinfo)[0] 66 | time_limit_hours = lilacinfo.time_limit_hours 67 | packager = '%s (on behalf of %s) <%s>' % ( 68 | myname, maintainer.name, maintainer.email) 69 | 70 | depend_packages = resolve_depends(repo, depends) 71 | pkgdir = repo.repodir / pkgbase 72 | try: 73 | pkg_version, rusage, error = call_worker( 74 | pkgbase = pkgbase, 75 | pkgdir = pkgdir, 76 | depend_packages = [str(x) for x in depend_packages], 77 | update_info = update_info, 78 | on_build_vers = to_build.on_build_vers, 79 | bindmounts = bindmounts, 80 | commit_msg_template = commit_msg_template, 81 | tmpfs = tmpfs, 82 | logfile = logfile, 83 | deadline = start_time + time_limit_hours * 3600, 84 | packager = packager, 85 | ) 86 | if error: 87 | raise error 88 | finally: 89 | may_need_cleanup() 90 | reap_zombies() 91 | 92 | staging = lilacinfo.staging 93 | if staging: 94 | destdir = destdir / 'staging' 95 | if not destdir.is_dir(): 96 | destdir.mkdir() 97 | sign_and_copy(pkgdir, destdir) 98 | if staging: 99 | l10n = intl.get_l10n('mail') 100 | notify_maintainers( 101 | repo, lilacinfo, 102 | l10n.format_value('package-staged-subject', { 103 | 'pkg': pkgbase, 104 | 'version': pkg_version, 105 | }), 106 | 
l10n.format_value('package-staged-body'), 107 | ) 108 | result = BuildResult.staged() 109 | else: 110 | result = BuildResult.successful() 111 | 112 | except SkipBuild as e: 113 | result = BuildResult.skipped(e.msg) 114 | except BuildFailed as e: 115 | result = BuildResult.failed(e.msg) 116 | except Exception as e: 117 | logger.exception('build failed with exception') 118 | result = BuildResult.failed(e) 119 | 120 | elapsed = time.time() - start_time 121 | result.rusage = rusage 122 | result.elapsed = elapsed 123 | with logfile.open('a') as f: 124 | t = time.strftime('%Y-%m-%d %H:%M:%S %z') 125 | print( 126 | f'\n[{t}] build (version {pkg_version}) finished in {int(elapsed)}s with result: {result!r}', 127 | file = f, 128 | ) 129 | return result, pkg_version 130 | 131 | def resolve_depends(repo: Optional[Repo], depends: Iterable[Dependency]) -> List[str]: 132 | need_build_first = set() 133 | depend_packages = [] 134 | 135 | for x in depends: 136 | p = x.resolve() 137 | if p is None: 138 | if repo is None or not repo.manages(x): 139 | # ignore depends that are not in repo 140 | continue 141 | need_build_first.add(x.pkgname) 142 | else: 143 | depend_packages.append(str(p)) 144 | 145 | if need_build_first: 146 | raise MissingDependencies(need_build_first) 147 | logger.info('depends: %s, resolved: %s', depends, depend_packages) 148 | 149 | return depend_packages 150 | 151 | def may_need_cleanup() -> None: 152 | st = os.statvfs('/var/lib/archbuild') 153 | if st.f_bavail * st.f_bsize < 60 * 1024 ** 3: 154 | subprocess.check_call(['sudo', 'build-cleaner']) 155 | 156 | def sign_and_copy(pkgdir: Path, dest: Path) -> None: 157 | pkgs = get_built_package_files(pkgdir) 158 | for pkg in pkgs: 159 | subprocess.run([ 160 | 'gpg', '--pinentry-mode', 'loopback', 161 | '--passphrase', '', '--detach-sign', '--', pkg, 162 | ]) 163 | for f in pkgs + [x.with_name(x.name + '.sig') for x in pkgs]: 164 | with suppress(FileExistsError): 165 | (dest / f.name).hardlink_to(f) 166 | 167 | def notify_maintainers( 168 | repo: Repo, lilacinfo: LilacInfo, 169 | subject: str, body: str, 170 | ) -> None: 171 | maintainers = repo.find_maintainers(lilacinfo) 172 | addresses = [str(x) for x in maintainers] 173 | repo.sendmail(addresses, subject, body) 174 | 175 | def call_worker( 176 | pkgbase: str, 177 | pkgdir: Path, 178 | logfile: Path, 179 | depend_packages: List[str], 180 | update_info: NvResults, 181 | on_build_vers: OnBuildVers, 182 | commit_msg_template: str, 183 | bindmounts: list[str], 184 | tmpfs: list[str], 185 | deadline: float, 186 | packager: str, 187 | ) -> tuple[Optional[str], RUsage, Optional[Exception]]: 188 | ''' 189 | return: package version, resource usage, error information 190 | ''' 191 | input = { 192 | 'depend_packages': depend_packages, 193 | 'update_info': update_info.to_list(), 194 | 'on_build_vers': on_build_vers, 195 | 'commit_msg_template': commit_msg_template, 196 | 'bindmounts': bindmounts, 197 | 'tmpfs': tmpfs, 198 | 'logfile': str(logfile), # for sending error reports 199 | 'worker_no': TLS.worker_no, 200 | } 201 | fd, resultpath = tempfile.mkstemp(prefix=f'{pkgbase}-', suffix='.lilac') 202 | os.close(fd) 203 | input['result'] = resultpath 204 | input_bytes = json.dumps(input).encode() 205 | logger.debug('worker input: %r', input_bytes) 206 | 207 | cmd = [sys.executable, '-Xno_debug_ranges', '-m', 'lilac2.worker', pkgbase] 208 | if systemd.available(): 209 | _call_cmd = _call_cmd_systemd 210 | else: 211 | _call_cmd = _call_cmd_subprocess 212 | name = f'lilac-worker-{TLS.worker_no}' 213 | 
rusage, timedout = _call_cmd( 214 | name, cmd, logfile, pkgdir, deadline, 215 | input_bytes, packager, 216 | ) 217 | 218 | try: 219 | with open(resultpath) as f: 220 | r = json.load(f) 221 | logger.debug('received from worker: %r', r) 222 | except json.decoder.JSONDecodeError: 223 | r = { 224 | 'status': 'failed', 225 | 'msg': 'worker did not return a proper result!', 226 | 'version': None, 227 | } 228 | finally: 229 | with suppress(FileNotFoundError): 230 | os.unlink(resultpath) 231 | 232 | st = r['status'] 233 | 234 | error: Optional[Exception] 235 | if timedout: 236 | error = TimeoutError() 237 | elif st == 'done': 238 | error = None 239 | elif st == 'skipped': 240 | error = SkipBuild(r['msg']) 241 | elif st == 'failed': 242 | error = BuildFailed(r['msg']) 243 | else: 244 | error = RuntimeError('unknown status from worker', st) 245 | 246 | version = r['version'] 247 | return version, rusage, error 248 | 249 | def _call_cmd_subprocess( 250 | name: str, 251 | cmd: Cmd, 252 | logfile: Path, 253 | pkgdir: Path, 254 | deadline: float, 255 | input: bytes, 256 | packager: str, 257 | ) -> tuple[RUsage, bool]: 258 | '''call cmd as a subprocess''' 259 | timedout = False 260 | env = os.environ.copy() 261 | env['PACKAGER'] = packager 262 | with logfile.open('wb') as logf: 263 | p = subprocess.Popen( 264 | cmd, 265 | stdin = subprocess.PIPE, 266 | stdout = logf, 267 | stderr = logf, 268 | cwd = pkgdir, 269 | env = env, 270 | ) 271 | p.stdin.write(input) # type: ignore 272 | p.stdin.close() # type: ignore 273 | 274 | while True: 275 | try: 276 | p.wait(10) 277 | except subprocess.TimeoutExpired: 278 | if time.time() > deadline: 279 | timedout = True 280 | # we need to rely on worker to kill child processes 281 | p.send_signal(signal.SIGINT) 282 | try: 283 | p.wait(3) 284 | except subprocess.TimeoutExpired: 285 | p.kill() 286 | else: 287 | break 288 | 289 | return RUsage(0, 0), timedout 290 | 291 | def _call_cmd_systemd( 292 | name: str, 293 | cmd: Cmd, 294 | logfile: Path, 295 | pkgdir: Path, 296 | deadline: float, 297 | input: bytes, 298 | packager: str, 299 | ) -> tuple[RUsage, bool]: 300 | '''run cmd with systemd-run and collect resource usage''' 301 | with logfile.open('wb') as logf: 302 | p = systemd.start_cmd( 303 | name, 304 | cmd, 305 | stdin = subprocess.PIPE, 306 | stdout = logf, 307 | stderr = logf, 308 | cwd = pkgdir, 309 | setenv = { 310 | 'PATH': os.environ['PATH'], # we've updated our PATH 311 | 'MAKEFLAGS': os.environ.get('MAKEFLAGS', ''), 312 | 'PACKAGER': packager, 313 | }, 314 | ) 315 | p.stdin.write(input) # type: ignore 316 | p.stdin.close() # type: ignore 317 | 318 | return systemd.poll_rusage(name, deadline) 319 | 320 | -------------------------------------------------------------------------------- /lilac2/cmd.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import logging 5 | import subprocess 6 | import signal 7 | import sys 8 | import re 9 | from subprocess import CalledProcessError 10 | from typing import Optional, Dict 11 | import types 12 | from pathlib import Path 13 | from contextlib import suppress 14 | 15 | from .typing import Cmd 16 | from . 
import intl 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | def _find_gitroot() -> Path: 21 | d = Path('.').resolve(strict=True) 22 | while d != d.parent: 23 | if (d / '.git').exists(): 24 | return d 25 | else: 26 | d = d.parent 27 | 28 | raise Exception('failed to find git root') 29 | 30 | def git_pull() -> bool: 31 | output = run_cmd(['git', 'pull', '--no-edit']) 32 | return 'up-to-date' not in output 33 | 34 | def git_pull_override() -> bool: 35 | try: 36 | env = os.environ.copy() 37 | env['LANG'] = 'en_US.UTF-8' 38 | with suppress(KeyError): 39 | del env['LANGUAGE'] 40 | output = run_cmd( 41 | ['git', 'pull', '--no-edit'], 42 | env = env, 43 | ) 44 | except subprocess.CalledProcessError as e: 45 | if 'would be overwritten by merge:' in e.output: 46 | files = [line.strip() 47 | for line in e.output.splitlines() 48 | if line.startswith('\t')] 49 | gitroot = _find_gitroot() 50 | for f in files: 51 | (gitroot / f).unlink() 52 | output = run_cmd(['git', 'pull', '--no-edit']) 53 | else: 54 | raise 55 | 56 | return 'up-to-date' not in output 57 | 58 | def git_push() -> None: 59 | while True: 60 | try: 61 | run_cmd(['git', 'push']) 62 | break 63 | except CalledProcessError as e: 64 | if 'non-fast-forward' in e.output or 'fetch first' in e.output: 65 | run_cmd(["git", "pull", "--rebase"]) 66 | else: 67 | raise 68 | 69 | def git_reset_hard() -> None: 70 | run_cmd(['git', 'reset', '--hard']) 71 | 72 | def get_git_branch() -> str: 73 | out = subprocess.check_output( 74 | ['git', 'branch', '--no-color'], universal_newlines = True) 75 | for line in out.splitlines(): 76 | if line.startswith('* '): 77 | return line.split(None, 1)[-1] 78 | 79 | return '(unknown branch)' 80 | 81 | def run_cmd( 82 | cmd: Cmd, *, 83 | use_pty: bool = False, 84 | silent: bool = False, 85 | cwd: Optional[os.PathLike] = None, 86 | env: Optional[Dict[str, str]] = None, 87 | ) -> str: 88 | logger.debug('running %r, %susing pty,%s showing output', cmd, 89 | '' if use_pty else 'not ', 90 | ' not' if silent else '') 91 | if use_pty: 92 | rfd, stdout = os.openpty() 93 | stdin = stdout 94 | # for fd leakage 95 | logger.debug('pty master fd=%d, slave fd=%d.', rfd, stdout) 96 | else: 97 | stdin = subprocess.DEVNULL 98 | stdout = subprocess.PIPE 99 | 100 | try: 101 | exited = False 102 | def child_exited(signum: int, sigframe: Optional[types.FrameType]) -> None: 103 | nonlocal exited 104 | exited = True 105 | old_hdl = signal.signal(signal.SIGCHLD, child_exited) 106 | 107 | p = subprocess.Popen( 108 | cmd, stdin = stdin, 109 | stdout = stdout, stderr = subprocess.STDOUT, 110 | cwd = cwd, env = env, 111 | ) 112 | if use_pty: 113 | os.close(stdout) 114 | else: 115 | assert p.stdout 116 | rfd = p.stdout.fileno() 117 | out = [] 118 | outlen = 0 119 | 120 | while True: 121 | try: 122 | r = os.read(rfd, 4096) 123 | if not r: 124 | if exited: 125 | break 126 | else: 127 | continue 128 | except InterruptedError: 129 | continue 130 | except OSError as e: 131 | if e.errno == 5: # Input/output error: no clients run 132 | break 133 | else: 134 | raise 135 | r = r.replace(b'\x0f', b'') # ^O 136 | if not silent: 137 | sys.stderr.buffer.write(r) 138 | out.append(r) 139 | outlen += len(r) 140 | if outlen > 1024 ** 3: # larger than 1G 141 | p.kill() 142 | 143 | code = p.wait() 144 | if old_hdl is not None: 145 | signal.signal(signal.SIGCHLD, old_hdl) 146 | 147 | outb = b''.join(out) 148 | outs = outb.decode('utf-8', errors='replace') 149 | outs = outs.replace('\r\n', '\n') 150 | outs = re.sub(r'.*\r', '', outs) 151 | if outlen > 1024 ** 3: # 
larger than 1G 152 | l10n = intl.get_l10n('mail') 153 | outs += '\n\n' + l10n.format_value('too-much-output') + '\n' 154 | if code != 0: 155 | # set output by keyword to avoid being included in repr() 156 | raise subprocess.CalledProcessError(code, cmd, output=outs) 157 | return outs 158 | finally: 159 | if use_pty: 160 | os.close(rfd) 161 | 162 | def pkgrel_changed(from_: str, to: str, pkgname: str) -> bool: 163 | cmd = ["git", "diff", "-p", from_, to, '--', pkgname + '/PKGBUILD'] 164 | r = run_cmd(cmd, silent=True).splitlines() 165 | return any(x.startswith('+pkgrel=') for x in r) 166 | 167 | UNTRUSTED_PREFIX: Cmd = [ 168 | 'bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home', 169 | '--tmpfs', '/run', '--die-with-parent', 170 | '--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', 171 | ] 172 | -------------------------------------------------------------------------------- /lilac2/const.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | import types 5 | 6 | mydir = Path('~/.lilac').expanduser() 7 | AUR_REPO_DIR = mydir / 'aur' 8 | AUR_REPO_DIR.mkdir(parents=True, exist_ok=True) 9 | PACMAN_DB_DIR = mydir / 'pacmandb' 10 | PACMAN_DB_DIR.mkdir(exist_ok=True) 11 | (mydir / 'gnupg').mkdir(exist_ok=True) 12 | 13 | SPECIAL_FILES = ('package.list', 'lilac.py', 'lilac.yaml', '.gitignore') 14 | OFFICIAL_REPOS = ('core', 'extra', 'multilib') 15 | 16 | _G = types.SimpleNamespace() 17 | # main process: 18 | # repo: Repo 19 | # mod: LilacMod 20 | # worker: 21 | # repo: Repo (for sending reports; not loading all lilacinfos) 22 | # mod: LilacMod 23 | # built_version: Optional[str] 24 | -------------------------------------------------------------------------------- /lilac2/db.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | import datetime 3 | import re 4 | import logging 5 | from functools import partial 6 | 7 | import psycopg2 8 | import psycopg2.pool 9 | 10 | from .typing import UsedResource, OnBuildEntry, OnBuildVers 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | USE = False 15 | Pool = None 16 | 17 | def connect_with_schema(schema, dsn): 18 | conn = psycopg2.connect(dsn) 19 | schema = schema or 'lilac' 20 | if "'" in schema: 21 | raise ValueError('bad schema', schema) 22 | with conn.cursor() as cur: 23 | cur.execute(f"set search_path to '{schema}'") 24 | return conn 25 | 26 | def setup(dsn, schema): 27 | global USE, Pool 28 | Pool = psycopg2.pool.ThreadedConnectionPool( 29 | 1, 10, dsn, partial(connect_with_schema, schema)) 30 | USE = True 31 | 32 | @contextmanager 33 | def get_session(): 34 | conn = Pool.getconn() 35 | try: 36 | with conn: 37 | with conn.cursor() as cur: 38 | yield cur 39 | finally: 40 | Pool.putconn(conn) 41 | 42 | def build_updated(s) -> None: 43 | s.execute('notify build_updated') 44 | 45 | def is_last_build_failed(pkgbase: str) -> bool: 46 | with get_session() as s: 47 | s.execute( 48 | '''select result from pkglog 49 | where pkgbase = %s 50 | order by ts desc limit 1''', (pkgbase,)) 51 | r = s.fetchall() 52 | 53 | return bool(r) and r[0][0] == 'failed' # fetchall() returns rows as tuples; compare the result column 54 | 55 | def mark_pkg_as(s, pkg: str, status: str) -> None: 56 | s.execute('update pkgcurrent set status = %s where pkgbase = %s', (status, pkg)) 57 | 58 | def get_pkgs_last_success_times(pkgs: list[str]) -> list[tuple[str, datetime.datetime]]: 59 | if not pkgs: 60 | return [] 61 | 62 | with get_session() as s: 63 |
s.execute( 64 | '''select pkgbase, max(ts) from pkglog 65 | where pkgbase = any(%s) and result in ('successful', 'staged') 66 | group by pkgbase''', (pkgs,)) 67 | r = s.fetchall() 68 | return r 69 | 70 | def get_pkgs_last_rusage(pkgs: list[str]) -> dict[str, UsedResource]: 71 | if not pkgs: 72 | return {} 73 | 74 | with get_session() as s: 75 | s.execute(''' 76 | select pkgbase, cputime, memory, elapsed from ( 77 | select pkgbase, cputime, memory, elapsed, row_number() over (partition by pkgbase order by ts desc) as k 78 | from pkglog 79 | where pkgbase = any(%s) and result in ('successful', 'staged') 80 | ) as w where k = 1''', (pkgs,)) 81 | rs = s.fetchall() 82 | ret = {r[0]: UsedResource(r[1], r[2], r[3]) for r in rs} 83 | 84 | return ret 85 | 86 | def _get_last_two_versions(s, pkg: str) -> tuple[str, str]: 87 | s.execute( 88 | '''select pkg_version from pkglog 89 | where pkgbase = %s and result in ('successful', 'staged') 90 | order by ts desc limit 2''', (pkg,)) 91 | r = s.fetchall() 92 | 93 | if len(r) == 1: 94 | return '', r[0][0] 95 | elif len(r) == 2: 96 | return r[1][0], r[0][0] 97 | elif len(r) == 0: 98 | return '', '' 99 | else: 100 | raise RuntimeError('limit 2 returns more?!') 101 | 102 | def get_update_on_build_vers( 103 | update_on_build: list[OnBuildEntry], 104 | ) -> OnBuildVers: 105 | ret = [] 106 | 107 | with get_session() as s: 108 | for on_build in update_on_build: 109 | old, new = _get_last_two_versions(s, on_build.pkgbase) 110 | if not old and not new: 111 | logger.warning('no build info for %s, but update_on_build references it', 112 | on_build.pkgbase) 113 | 114 | if (regex := on_build.from_pattern) and (repl := on_build.to_pattern): 115 | old = re.sub(regex, repl, old) 116 | new = re.sub(regex, repl, new) 117 | ret.append((old, new)) 118 | 119 | return ret 120 | -------------------------------------------------------------------------------- /lilac2/intl.py: -------------------------------------------------------------------------------- 1 | import os 2 | import locale 3 | 4 | from fluent.runtime import FluentLocalization, FluentResourceLoader 5 | 6 | cache = {} 7 | 8 | def get_l10n(name): 9 | if name not in cache: 10 | d = os.path.dirname(__file__) 11 | loc = locale.getlocale()[0] 12 | loader = FluentResourceLoader(f'{d}/l10n/{{locale}}') 13 | l10n = FluentLocalization([loc, "en"], [f'{name}.ftl'], loader) 14 | cache[name] = l10n 15 | return cache[name] 16 | -------------------------------------------------------------------------------- /lilac2/l10n/en/mail.ftl: -------------------------------------------------------------------------------- 1 | nonexistent-deps-subject = Non-existent dependencies are listed in lilac.yaml for { $pkg } 2 | nonexistent-deps-body = lilac.yaml of package { $pkg } specifies repo_depends, but the (direct or indirect) {$count -> 3 | [one] dependency { $deps } is 4 | *[other] dependencies { $deps } are 5 | } not in this repository. 6 | 7 | update_on_build-error = Error while checking update_on_build for %s 8 | 9 | dependency-issue-subject = Dependency issue for %s 10 | dependency-issue-failed = {$count -> 11 | [one] Dependency 12 | *[other] Dependencies 13 | } { $faileddeps } for { $pkg } failed to build. 14 | dependency-issue-failed-this-batch = {$count_deps -> 15 | [one] Dependency { $deps } for { $pkg } is 16 | *[other] Dependencies { $deps } for { $pkg } are 17 | } missing, among which {$count_failed -> 18 | [one] { $faileddeps } has 19 | *[other] { $faileddeps } have 20 | } failed this time.
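# The {$var -> ...} blocks above are Fluent selectors: when format_value() is
# given a numeric argument, the variant matching its English plural category
# ([one]) is chosen, and *[other] marks the default fallback. A hypothetical
# message for illustration (not part of this file):
#   deps-count = {$count ->
#       [one] one dependency
#      *[other] {$count} dependencies
#   }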
21 | 22 | aur-submit-error = Failed to submit %s to AUR 23 | 24 | package-staged-subject = { $pkg } { $version } has been packaged just now 25 | package-staged-body = The package has been placed in the staging directory; please check it and then publish manually. 26 | 27 | too-much-output = Too much output, killed. 28 | 29 | log-too-long = Log too long, omitting... 30 | 31 | nvchecker-error-report = nvchecker error report 32 | 33 | github-token-not-set = github token not configured, unable to retrieve Email address from GitHub 34 | github-email-error = Error retrieving maintainer's Email address from GitHub: { $error } 35 | github-email-private = GitHub user { $user } doesn't make their Email address public 36 | unsupported-maintainer-info = Unsupported format: { $info } 37 | 38 | maintainers-error-subject = maintainers for { $pkg } have errors 39 | maintainers-error-body = The following maintainers information has errors; please fix: 40 | 41 | packaging-error-subprocess-subject = Error packaging %s 42 | packaging-error-subprocess = 43 | Command failed! 44 | 45 | Command { $cmd } returned error code { $returncode }. 46 | packaging-error-subprocess-output = Output of the command follows: 47 | packaging-error-traceback = Traceback: 48 | packaging-error-aur-subject = Error retrieving AUR package %s 49 | packaging-error-aur = Failed to retrieve AUR package! 50 | packaging-error-timeout-subject = Timeout when packaging %s 51 | packaging-error-unknown-subject = Unknown error when packaging %s 52 | packaging-error-unknown = An unknown error happened! Traceback: 53 | packaging-log = Packaging log: 54 | 55 | lilac-yaml-loadding-error = Failed to load lilac.yaml for %s 56 | 57 | package-in-official-group = Package is added to official groups: { $groups } 58 | package-replacing-official-package = Package is replacing official packages: { $packages } 59 | package-conflicts-with-official-repos = %s conflicts with official repos 60 | package-older-subject = Built package %s has an older version than the one in repo 61 | package-older-body = Package { $pkg } built as version { $built_version }, but there is a version { $repo_version } in repo already.
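# These messages are looked up from Python through lilac2.intl.get_l10n('mail'),
# e.g. lilac2/building.py (shown earlier in this listing) does:
#   l10n = intl.get_l10n('mail')
#   l10n.format_value('package-staged-subject', {'pkg': pkgbase, 'version': pkg_version})
# Messages containing %s (e.g. packaging-error-timeout-subject) are presumably
# %-formatted with the pkgbase by the reporting code instead of taking Fluent arguments.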
62 | -------------------------------------------------------------------------------- /lilac2/l10n/en/main.ftl: -------------------------------------------------------------------------------- 1 | runtime-error = Runtime error 2 | runtime-error-traceback = Traceback follows: 3 | 4 | nvchecker-issues-subject = nvchecker issues 5 | nvchecker-issues-body = There are some errors while running nvchecker: 6 | -------------------------------------------------------------------------------- /lilac2/l10n/zh_CN/mail.ftl: -------------------------------------------------------------------------------- 1 | nonexistent-deps-subject = 软件包 { $pkg } 的 lilac.yaml 指定了不存在的依赖 2 | nonexistent-deps-body = 软件包 { $pkg } 的 lilac.yaml 指定了 repo_depends,然而其直接或者间接的依赖项 { $deps } 并不在本仓库中。 3 | 4 | update_on_build-error = %s update_on_build 检查出错 5 | 6 | dependency-issue-subject = %s 出现依赖问题 7 | dependency-issue-failed = { $pkg } 的依赖 { $faileddeps } 打包失败了。 8 | dependency-issue-failed-this-batch = { $pkg } 缺少依赖 { $deps },其中 { $faileddeps } 本次打包失败了。 9 | 10 | aur-submit-error = 提交软件包 %s 到 AUR 时出错 11 | 12 | package-staged-subject = { $pkg } { $version } 刚刚打包了 13 | package-staged-body = 软件包已被置于 staging 目录,请查验后手动发布。 14 | 15 | too-much-output = 输出过多,已击杀。 16 | 17 | log-too-long = 日志过长,省略ing…… 18 | 19 | nvchecker-error-report = nvchecker 错误报告 20 | 21 | github-token-not-set = 未设置 github token,无法从 GitHub 取得用户 Email 地址 22 | github-email-error = 从 GitHub 获取维护者 Email 地址时出错:{ $error } 23 | github-email-private = GitHub 用户 { $user } 未公开 Email 地址 24 | unsupported-maintainer-info = 不支持的格式:{ $info } 25 | 26 | maintainers-error-subject = { $pkg } 的 maintainers 信息有误 27 | maintainers-error-body = 以下 maintainers 信息有误,请修正。 28 | 29 | -traceback-follows = 调用栈如下: 30 | packaging-error-subprocess-subject = 在打包软件包 %s 时发生错误 31 | packaging-error-subprocess = 32 | 命令执行失败! 33 | 34 | 命令 { $cmd } 返回了错误号 { $returncode }。 35 | packaging-error-subprocess-output = 命令的输出如下: 36 | packaging-error-traceback = { -traceback-follows } 37 | packaging-error-aur-subject = 在获取AUR包 %s 时发生错误 38 | packaging-error-aur = 获取AUR包失败! 39 | packaging-error-timeout-subject = 打包软件包 %s 超时 40 | packaging-error-unknown-subject = 在打包软件包 %s 时发生未知错误 41 | packaging-error-unknown = 发生未知错误!{ -traceback-follows } 42 | packaging-log = 打包日志: 43 | 44 | lilac-yaml-loadding-error = 为软件包 %s 载入 lilac.yaml 时失败 45 | 46 | package-in-official-group = 软件包被加入了官方组:{ $groups } 47 | package-replacing-official-package = 软件包将取代官方包:{ $packages } 48 | package-conflicts-with-official-repos = %s 与官方软件库冲突 49 | package-older-subject = %s 新打的包比仓库里的包旧 50 | package-older-body = 包 { $pkg } 打的版本为 { $built_version },但在仓库里已有较新版本 { $repo_version }。 51 | -------------------------------------------------------------------------------- /lilac2/l10n/zh_CN/main.ftl: -------------------------------------------------------------------------------- 1 | runtime-error = 运行时错误 2 | runtime-error-traceback = 调用栈如下: 3 | 4 | nvchecker-issues-subject = nvchecker 问题 5 | nvchecker-issues-body = 在更新检查时出现了一些错误: 6 | -------------------------------------------------------------------------------- /lilac2/lilacpy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | import contextlib 5 | import importlib.util 6 | from pathlib import Path 7 | from typing import Generator, cast 8 | 9 | from .typing import LilacMod 10 | from . import lilacyaml 11 | from . 
import api 12 | 13 | @contextlib.contextmanager 14 | def load_lilac(dir: Path) -> Generator[LilacMod, None, None]: 15 | try: 16 | spec = importlib.util.spec_from_file_location( 17 | 'lilac.py', dir / 'lilac.py') 18 | if spec is None: 19 | raise RuntimeError('lilac.py spec is None') 20 | mod = importlib.util.module_from_spec(spec) 21 | 22 | yamlconf = lilacyaml.load_lilac_yaml(dir) 23 | g = None 24 | for k, v in yamlconf.items(): 25 | if k.endswith('_script'): 26 | name = k[:-len('_script')] 27 | if name == 'post_build_always': 28 | code = [f'def {name}(success):'] 29 | else: 30 | code = [f'def {name}():'] 31 | for line in v.splitlines(): 32 | code.append(f' {line}') 33 | if g is None: 34 | g = vars(mod) 35 | # "import" lilac2.api 36 | g.update({a: b for a, b in api.__dict__.items() 37 | if not a.startswith('_')}) 38 | code_str = '\n'.join(code) 39 | # run code in `mod` namespace 40 | exec(code_str, g) 41 | else: 42 | setattr(mod, k, v) 43 | 44 | assert spec.loader 45 | with contextlib.suppress(FileNotFoundError): 46 | spec.loader.exec_module(mod) 47 | 48 | mod = cast(LilacMod, mod) 49 | mod.pkgbase = dir.absolute().name 50 | 51 | if hasattr(mod, 'update_on'): 52 | mod.update_on = lilacyaml.parse_update_on(yamlconf['update_on'])[0] 53 | 54 | yield mod 55 | 56 | finally: 57 | with contextlib.suppress(KeyError): 58 | del sys.modules['lilac.py'] 59 | -------------------------------------------------------------------------------- /lilac2/lilacyaml.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from typing import Any, Iterator, cast 5 | import importlib.resources 6 | import sys 7 | import datetime 8 | 9 | import yaml 10 | 11 | from .vendor.myutils import dehumantime 12 | 13 | from . import api 14 | from .const import _G, PACMAN_DB_DIR 15 | from .typing import LilacInfo, LilacInfos, ExcInfo, NvEntries, OnBuildEntry 16 | 17 | ALIASES: dict[str, Any] 18 | FUNCTIONS: list[str] = [ 19 | 'pre_build', 'post_build', 'post_build_always', 20 | ] 21 | 22 | def _load_aliases() -> None: 23 | global ALIASES 24 | data = importlib.resources.files('lilac2').joinpath('aliases.yaml').read_text() 25 | ALIASES = yaml.safe_load(data) 26 | 27 | _load_aliases() 28 | 29 | def iter_pkgdir(repodir: Path) -> Iterator[Path]: 30 | for x in repodir.iterdir(): 31 | if x.name[0] == '.': 32 | continue 33 | 34 | # leftover files, e.g. 
__pycache__ stuff 35 | if not (x / 'lilac.yaml').is_file(): 36 | continue 37 | 38 | yield x 39 | 40 | def load_lilac_yaml(dir: Path) -> dict[str, Any]: 41 | with open(dir / 'lilac.yaml') as f: 42 | conf = yaml.safe_load(f) 43 | 44 | if conf is None: 45 | return {} 46 | 47 | depends = conf.get('repo_depends') 48 | if depends: 49 | for i, entry in enumerate(depends): 50 | if isinstance(entry, dict): 51 | depends[i] = next(iter(entry.items())) 52 | else: 53 | depends[i] = entry, entry 54 | makedepends = conf.get('repo_makedepends') 55 | if makedepends: 56 | for i, entry in enumerate(makedepends): 57 | if isinstance(entry, dict): 58 | makedepends[i] = next(iter(entry.items())) 59 | else: 60 | makedepends[i] = entry, entry 61 | 62 | for func in FUNCTIONS: 63 | name = conf.get(func) 64 | if name: 65 | funcvalue = getattr(api, name) 66 | conf[func] = funcvalue 67 | 68 | return conf 69 | 70 | def load_managed_lilacinfos(repodir: Path) -> tuple[LilacInfos, dict[str, ExcInfo]]: 71 | infos: LilacInfos = {} 72 | errors = {} 73 | 74 | for x in iter_pkgdir(repodir): 75 | try: 76 | info = load_lilacinfo(x) 77 | if not info.managed: 78 | continue 79 | if info.time_limit_hours < 0: 80 | raise ValueError('time_limit_hours should be positive.') 81 | infos[x.name] = info 82 | except Exception: 83 | errors[x.name] = cast(ExcInfo, sys.exc_info()) 84 | 85 | return infos, errors 86 | 87 | def load_lilacinfo(dir: Path) -> LilacInfo: 88 | yamlconf = load_lilac_yaml(dir) 89 | if update_on := yamlconf.get('update_on'): 90 | update_ons, throttle_info = parse_update_on(update_on) 91 | else: 92 | update_ons = [] 93 | throttle_info = {} 94 | 95 | return LilacInfo( 96 | pkgbase = dir.absolute().name, 97 | maintainers = yamlconf.get('maintainers', []), 98 | update_on = update_ons, 99 | update_on_build = [OnBuildEntry(**x) for x in yamlconf.get('update_on_build', [])], 100 | throttle_info = throttle_info, 101 | repo_depends = yamlconf.get('repo_depends', []), 102 | repo_makedepends = yamlconf.get('repo_makedepends', []), 103 | time_limit_hours = yamlconf.get('time_limit_hours', 1), 104 | staging = yamlconf.get('staging', False), 105 | managed = yamlconf.get('managed', True), 106 | ) 107 | 108 | def expand_alias_arg(value: str) -> str: 109 | return value.format( 110 | pacman_db_dir = PACMAN_DB_DIR, 111 | repo_name = _G.repo.name, 112 | ) 113 | 114 | def parse_update_on( 115 | update_on: list[dict[str, Any]], 116 | ) -> tuple[NvEntries, dict[int, datetime.timedelta]]: 117 | ret_update: NvEntries = [] 118 | ret_throttle = {} 119 | 120 | for idx, entry in enumerate(update_on): 121 | t = entry.get('lilac_throttle') 122 | if t is not None: 123 | t_secs = dehumantime(t) 124 | ret_throttle[idx] = datetime.timedelta(seconds=t_secs) 125 | 126 | # fix wrong key for 'alpm-lilac' 127 | if entry.get('source') == 'alpm-lilac': 128 | del entry['source'] 129 | entry['alias'] = 'alpm-lilac' 130 | 131 | alias = entry.pop('alias', None) 132 | 133 | # fill alpm-lilac parameters 134 | if alias == 'alpm-lilac': 135 | entry['source'] = 'alpm' 136 | entry.setdefault('dbpath', str(PACMAN_DB_DIR)) 137 | entry.setdefault('repo', _G.repo.name) 138 | 139 | elif alias is not None: 140 | for k, v in ALIASES[alias].items(): 141 | if isinstance(v, str): 142 | entry.setdefault(k, expand_alias_arg(v)) 143 | else: 144 | entry.setdefault(k, v) 145 | 146 | # fill our dbpath if not provided 147 | source = entry.get('source') 148 | if source == 'alpm' or source == 'alpmfiles': 149 | entry.setdefault('dbpath', str(PACMAN_DB_DIR)) 150 | 151 | 
ret_update.append(entry) 152 | 153 | return ret_update, ret_throttle 154 | 155 | -------------------------------------------------------------------------------- /lilac2/mail.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import smtplib 4 | from typing import Union, Type, List, Dict, Any 5 | 6 | from .vendor.mailutils import assemble_mail 7 | from . import intl 8 | 9 | SMTPClient = Union[smtplib.SMTP, smtplib.SMTP_SSL] 10 | 11 | class MailService: 12 | def __init__(self, config: Dict[str, Any]) -> None: 13 | self.smtp_config = config['smtp'] 14 | self.mailtag = config['lilac']['name'] 15 | self.send_email = config['lilac']['send_email'] 16 | 17 | myname = config['lilac']['name'] 18 | myaddress = config['lilac']['email'] 19 | self.from_ = f'{myname} <{myaddress}>' 20 | self.unsub = config['lilac'].get('unsubscribe_address') 21 | 22 | def smtp_connect(self) -> SMTPClient: 23 | config = self.smtp_config 24 | host = config.get('host', '') 25 | port = config.get('port', 0) 26 | username = config.get('username') 27 | password = config.get('password') 28 | smtp_cls: Type[SMTPClient] 29 | if config.get('use_ssl', False): 30 | smtp_cls = smtplib.SMTP_SSL 31 | else: 32 | smtp_cls = smtplib.SMTP 33 | connection = smtp_cls(host, port) 34 | if not host: 35 | # __init__ doesn't connect; let's do it 36 | connection.connect() 37 | if username and password: 38 | connection.login(username, password) 39 | return connection 40 | 41 | def sendmail(self, to: Union[str, List[str]], 42 | subject: str, msg: str) -> None: 43 | if not self.send_email: 44 | return 45 | 46 | s = self.smtp_connect() 47 | if len(msg) > 5 * 1024 ** 2: 48 | l10n = intl.get_l10n('mail') 49 | too_long = l10n.format_value('log-too-long') 50 | msg = msg[:1024 ** 2] + '\n\n' + too_long + '\n\n' + \ 51 | msg[-1024 ** 2:] 52 | mail = assemble_mail('[%s] %s' % ( 53 | self.mailtag, subject), to, self.from_, text=msg) 54 | if self.unsub: 55 | mail['List-Unsubscribe'] = f'' 56 | s.send_message(mail) 57 | s.quit() 58 | 59 | -------------------------------------------------------------------------------- /lilac2/mediawiki2pkgbuild.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from urllib.parse import quote 3 | 4 | import requests 5 | 6 | from .vendor.htmlutils import parse_document_from_requests 7 | 8 | template = '''\ 9 | _name={name} 10 | pkgname=mediawiki-{name_lower} 11 | pkgver={version} 12 | pkgrel=1 13 | pkgdesc="MediaWiki extension that {desc}" 14 | arch=(any) 15 | url="https://www.mediawiki.org/wiki/Extension:{name}" 16 | license=('{license}') 17 | depends=('mediawiki>={mwver_cur}' 'mediawiki<{mwver_next}') 18 | source=("$_name-$pkgver-$pkgrel.tar.gz::{link}") 19 | sha256sums=() 20 | 21 | build() {{ 22 | true 23 | }} 24 | 25 | package() {{ 26 | cd "$srcdir" 27 | mkdir -p "$pkgdir/usr/share/webapps/mediawiki/extensions/" 28 | cp -ar $_name "$pkgdir/usr/share/webapps/mediawiki/extensions/" 29 | }} 30 | ''' 31 | 32 | URL = 'https://www.mediawiki.org/wiki/Special:ExtensionDistributor?extdistname=%s&extdistversion=REL%s' 33 | def get_link(name: str, mwver: str, s: requests.Session) -> str: 34 | url = URL % (quote(name), mwver.replace('.', '_')) 35 | doc = parse_document_from_requests(url, s) 36 | link = doc.xpath('//a[starts-with(@href, "https://extdist.wmflabs.org/dist/extensions/")]')[0] 37 | return link.get('href') 38 | 39 | def gen_pkgbuild( 40 | name: str, 41 | mwver: str, 42 | desc: str, 43 | 
license: str | list[str], 44 | s: requests.Session, 45 | ) -> str: 46 | major, minor = mwver.split('.') 47 | mwver_next = f'{major}.{int(minor)+1}' 48 | link = get_link(name, mwver, s) 49 | if isinstance(license, str): 50 | license = [license] 51 | license_str = ' '.join(f"'{x}'" for x in license) 52 | vars = { 53 | 'name': name, 54 | 'name_lower': name.lower(), 55 | 'version': datetime.datetime.now(tz=datetime.UTC).strftime('%Y%m%d'), 56 | 'desc': desc[0].lower() + desc[1:], 57 | 'link': link, 58 | 'mwver_cur': mwver, 59 | 'mwver_next': mwver_next, 60 | 'license': license_str, 61 | } 62 | return template.format_map(vars) 63 | -------------------------------------------------------------------------------- /lilac2/nomypy.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | 3 | from typing import Union, Optional 4 | 5 | from .typing import OnBuildEntry 6 | 7 | class SumType: 8 | _intermediate = True 9 | 10 | def __init__(self) -> None: 11 | if self.__class__.__dict__.get('_intermediate', False): 12 | raise TypeError('use subclasses') 13 | 14 | def __init_subclass__(cls): 15 | if not cls.__dict__.get('_intermediate', False): 16 | setattr(cls.__mro__[1], cls.__name__, cls) 17 | 18 | def __repr__(self) -> str: 19 | cname = self.__class__.__mro__[1].__name__ 20 | name = self.__class__.__name__ 21 | if e := self._extra_info(): 22 | return f'<{cname}.{name}: {e}>' 23 | else: 24 | return f'<{cname}.{name}>' 25 | 26 | def _extra_info(self): 27 | return '' 28 | 29 | class BuildResult(SumType): 30 | _intermediate = True 31 | rusage = None 32 | elapsed = 0 33 | 34 | def __bool__(self) -> bool: 35 | return self.__class__ in [self.successful, self.staged] 36 | 37 | def _extra_info(self): 38 | return f'rusage={self.rusage}' 39 | 40 | class successful(BuildResult): 41 | pass 42 | 43 | class staged(BuildResult): 44 | pass 45 | 46 | class failed(BuildResult): 47 | def __init__(self, error: Union[Exception, str]) -> None: 48 | self.error = error 49 | 50 | def _extra_info(self) -> str: 51 | if isinstance(self.error, Exception): 52 | msg = repr(self.error) 53 | else: 54 | msg = self.error 55 | return f'{msg}; {super()._extra_info()}' 56 | 57 | class skipped(BuildResult): 58 | def __init__(self, reason: str) -> None: 59 | self.reason = reason 60 | 61 | def _extra_info(self) -> str: 62 | return f'{self.reason!r}; {super()._extra_info()}' 63 | 64 | del successful, staged, failed, skipped 65 | 66 | class BuildReason(SumType): 67 | _intermediate = True 68 | 69 | def to_dict(self) -> str: 70 | d = {k: v for k, v in self.__dict__.items() 71 | if not k.startswith('_')} 72 | d['name'] = self.__class__.__name__ 73 | return d 74 | 75 | class NvChecker(BuildReason): 76 | def __init__( 77 | self, 78 | items: list[tuple[int, str]], 79 | changes: list[tuple[str, str]], 80 | ) -> None: 81 | '''items: list of (nvchecker entry index, source name) 82 | changes: list of (oldver, newver)''' 83 | self.items = items 84 | self.changes = changes 85 | 86 | def _extra_info(self) -> str: 87 | return f'items={self.items!r}, changes={self.changes!r}' 88 | 89 | def __str__(self): 90 | return 'nvchecker detects the following updates: ' + ', '.join( 91 | f'{v}({k}): {old} -> {new}' for (k, v), (old, new) 92 | in zip(self.items, self.changes) 93 | ) 94 | 95 | class UpdatedFailed(BuildReason): 96 | '''previously failed package gets updated''' 97 | 98 | def __str__(self): 99 | return 'it failed last time and has been updated' 100 | 101 | class UpdatedPkgrel(BuildReason): 102 | def 
__str__(self): 103 | return 'the pkgrel has been updated' 104 | 105 | class Depended(BuildReason): 106 | def __init__(self, depender): 107 | self.depender = depender 108 | 109 | def _extra_info(self) -> str: 110 | return self.depender 111 | 112 | def __str__(self): 113 | return f'{self.depender} depends on it' 114 | 115 | class FailedByDeps(BuildReason): 116 | def __init__(self, deps: tuple[str]) -> None: 117 | self.deps = deps 118 | 119 | def __str__(self): 120 | return f'it depends on {', '.join(self.deps)} and they have been built' 121 | 122 | class Cmdline(BuildReason): 123 | def __init__(self, runner: Optional[str]) -> None: 124 | self.runner = runner 125 | 126 | def _extra_info(self) -> str: 127 | if self.runner: 128 | return repr(self.runner) 129 | else: 130 | return '' 131 | 132 | def __str__(self): 133 | if self.runner: 134 | return f'{self.runner} has requested to build it' 135 | else: 136 | return 'it is requested on the command line' 137 | 138 | class OnBuild(BuildReason): 139 | def __init__(self, update_on_build: list[OnBuildEntry]) -> None: 140 | self.update_on_build = update_on_build 141 | 142 | def _extra_info(self) -> str: 143 | return repr(self.update_on_build) 144 | 145 | def __str__(self): 146 | if len(self.update_on_build) == 1: 147 | subj = self.update_on_build[0].pkgbase + ' has' 148 | else: 149 | subj = ', '.join(x.pkgbase for x in self.update_on_build) 150 | if len(subj) > 100: 151 | subj = subj[:100].rstrip() + '...' 152 | subj += ' have' 153 | return f'{subj} been built' 154 | 155 | def to_dict(self) -> str: 156 | d = { 157 | 'update_on_build': [{ 158 | k: v for k, v in x.__dict__.items() if v is not None 159 | } for x in self.update_on_build] 160 | } 161 | 162 | d['name'] = self.__class__.__name__ 163 | return d 164 | 165 | del NvChecker, UpdatedFailed, UpdatedPkgrel, Depended, FailedByDeps, Cmdline, OnBuild 166 | -------------------------------------------------------------------------------- /lilac2/nvchecker.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import logging 5 | from collections import defaultdict, UserList 6 | import subprocess 7 | import json 8 | from pathlib import Path 9 | from typing import ( 10 | List, NamedTuple, Tuple, Set, Dict, 11 | Optional, Any, Union, Iterable, TYPE_CHECKING, 12 | DefaultDict 13 | ) 14 | 15 | import tomli_w 16 | 17 | from .cmd import run_cmd 18 | from .const import mydir 19 | from .typing import LilacInfos, PathLike 20 | from .tools import reap_zombies 21 | from . 
import intl 22 | 23 | if TYPE_CHECKING: 24 | from .repo import Repo, Maintainer 25 | del Repo, Maintainer 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | NVCHECKER_FILE: Path = mydir / 'nvchecker.toml' 30 | KEY_FILE: Path = mydir / 'nvchecker_keyfile.toml' 31 | OLDVER_FILE = mydir / 'oldver' 32 | NEWVER_FILE = mydir / 'newver' 33 | 34 | class NvResult(NamedTuple): 35 | oldver: Optional[str] 36 | newver: Optional[str] 37 | 38 | class NvResults(UserList): 39 | data: List[NvResult] 40 | 41 | def to_list(self) -> list[tuple[Optional[str], Optional[str]]]: 42 | return [tuple(x) for x in self.data] # type: ignore 43 | 44 | @classmethod 45 | def from_list(cls, l) -> NvResults: 46 | return cls([NvResult(o, n) for o, n in l]) 47 | 48 | @property 49 | def oldver(self) -> Optional[str]: 50 | if self.data: 51 | return self.data[0].oldver 52 | return None 53 | 54 | @property 55 | def newver(self) -> Optional[str]: 56 | if self.data: 57 | return self.data[0].newver 58 | return None 59 | 60 | def _gen_config_from_lilacinfos( 61 | infos: LilacInfos, 62 | ) -> Tuple[Dict[str, Any], Dict[str, int], Dict[str, str]]: 63 | errors = {} 64 | newconfig = {} 65 | counts = {} 66 | for name, info in infos.items(): 67 | confs = info.update_on 68 | if not confs: 69 | errors[name] = 'unknown' 70 | continue 71 | 72 | for i, conf in enumerate(confs): 73 | if not isinstance(conf, dict): 74 | errors[name] = 'not array of dicts' 75 | break 76 | if i == 0: 77 | newconfig[f'{name}'] = conf 78 | else: 79 | newconfig[f'{name}:{i}'] = conf 80 | 81 | for key, value in conf.items(): 82 | if key == 'to_pattern': 83 | # TOML doesn't have None, but YAML doesn't distinguish '' and None 84 | if value is None: 85 | conf[key] = '' 86 | elif value in [None, '']: 87 | # compat with old config convention 88 | conf[key] = name 89 | counts[name] = len(confs) 90 | 91 | return newconfig, counts, errors 92 | 93 | def packages_need_update( 94 | repo: Repo, 95 | proxy: Optional[str] = None, 96 | care_pkgs: set[str] = set(), 97 | ) -> Tuple[Dict[str, NvResults], Set[str], Set[str]]: 98 | if care_pkgs: 99 | lilacinfos = {k: v for k, v in repo.lilacinfos.items() if k in care_pkgs} 100 | else: 101 | lilacinfos = repo.lilacinfos 102 | newconfig, update_on_counts, update_on_errors = _gen_config_from_lilacinfos(lilacinfos) 103 | 104 | if not OLDVER_FILE.exists(): 105 | open(OLDVER_FILE, 'a').close() 106 | 107 | newconfig['__config__'] = { 108 | 'oldver': str(OLDVER_FILE), 109 | 'newver': str(NEWVER_FILE), 110 | } 111 | if proxy: 112 | newconfig['__config__']['proxy'] = proxy 113 | 114 | with open(NVCHECKER_FILE, 'wb') as f: 115 | tomli_w.dump(newconfig, f) 116 | 117 | # vcs source needs to be run in the repo, so cwd=... 
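  # nvchecker writes one JSON event per line to the write end of this pipe
  # (via --json-log-fd below); we parse those events from the read end.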
118 | rfd, wfd = os.pipe() 119 | cmd: List[Union[str, PathLike]] = [ 120 | 'nvchecker', '--logger', 'both', '--json-log-fd', str(wfd), 121 | '-c', NVCHECKER_FILE] 122 | if KEY_FILE.exists(): 123 | cmd.extend(['--keyfile', KEY_FILE]) 124 | 125 | env = os.environ.copy() 126 | env['PYTHONPATH'] = str(Path(__file__).resolve().parent.parent) 127 | env['PYTHONNODEBUGRANGES'] = '1' 128 | 129 | logger.info('Running nvchecker...') 130 | process = subprocess.Popen( 131 | cmd, cwd=repo.repodir, pass_fds=(wfd,), env=env) 132 | os.close(wfd) 133 | 134 | output = os.fdopen(rfd) 135 | # pkgbase => index => NvResult 136 | nvdata_nested: Dict[str, Dict[int, NvResult]] = {} 137 | errors: DefaultDict[Optional[str], List[Dict[str, Any]]] = defaultdict(list) 138 | rebuild = set() 139 | for l in output: 140 | j = json.loads(l) 141 | pkg = j.get('name') 142 | if pkg and ':' in pkg: 143 | pkg, i = pkg.split(':', 1) 144 | i = int(i) 145 | else: 146 | i = 0 147 | if pkg not in nvdata_nested: 148 | nvdata_nested[pkg] = {} 149 | 150 | event = j['event'] 151 | if event == 'updated': 152 | nvdata_nested[pkg][i] = NvResult(j['old_version'], j['version']) 153 | if i != 0: 154 | rebuild.add(pkg) 155 | elif event == 'up-to-date': 156 | nvdata_nested[pkg][i] = NvResult(j['version'], j['version']) 157 | elif j['level'] in ['warning', 'warn', 'error', 'exception', 'critical']: 158 | errors[pkg].append(j) 159 | 160 | # don't rebuild if part of its checks have failed 161 | rebuild -= errors.keys() 162 | 163 | ret = process.wait() 164 | reap_zombies() 165 | if ret != 0: 166 | raise subprocess.CalledProcessError(ret, cmd) 167 | 168 | error_owners: DefaultDict[Maintainer, List[Dict[str, Any]]] = defaultdict(list) 169 | for pkg, pkgerrs in errors.items(): 170 | if pkg is None: 171 | continue 172 | pkg = pkg.split(':', 1)[0] 173 | 174 | maintainers = repo.find_maintainers(lilacinfos[pkg]) 175 | for maintainer in maintainers: 176 | error_owners[maintainer].extend(pkgerrs) 177 | 178 | for pkg, error in update_on_errors.items(): 179 | maintainers = repo.find_maintainers(lilacinfos[pkg]) 180 | for maintainer in maintainers: 181 | error_owners[maintainer].append({ 182 | 'name': pkg, 183 | 'error': error, 184 | 'event': 'wrong or missing `update_on` config', 185 | }) 186 | 187 | l10n = intl.get_l10n('mail') 188 | for who, their_errors in error_owners.items(): 189 | logger.warning('send nvchecker report for %r packages to %s', 190 | {x['name'] for x in their_errors}, who) 191 | repo.sendmail(who, l10n.format_value('nvchecker-error-report'), 192 | '\n'.join(_format_error(e) for e in their_errors)) 193 | 194 | if None in errors: # errors belong to unknown packages 195 | l10n = intl.get_l10n('main') 196 | subject = l10n.format_value('nvchecker-issues-subject') 197 | msg = l10n.format_value('nvchecker-issues-body') + '\n\n' + '\n'.join( 198 | _format_error(e) for e in errors[None]) + '\n' 199 | repo.send_repo_mail(subject, msg) 200 | 201 | nvdata: Dict[str, NvResults] = {} 202 | 203 | for pkgbase, d in nvdata_nested.items(): 204 | if pkgbase is None: 205 | # from events without a name 206 | continue 207 | n = update_on_counts[pkgbase] 208 | nrs = nvdata[pkgbase] = NvResults() 209 | for i in range(n): 210 | if i in d: 211 | nrs.append(d[i]) 212 | else: 213 | # item at this index has failed; insert a dummy one 214 | nrs.append(NvResult(None, None)) 215 | 216 | for pkgbase in lilacinfos: 217 | if pkgbase not in nvdata: 218 | # we know nothing about these versions 219 | # maybe nvchecker has failed 220 | nvdata[pkgbase] = NvResults() 221 | 222 | 
return nvdata, set(update_on_errors.keys()), rebuild
223 |
224 | def _format_error(error) -> str:
225 |   if 'exception' in error:
226 |     exception = error['exception']
227 |     error = error.copy()
228 |     del error['exception']
229 |   else:
230 |     exception = None
231 |
232 |   ret = json.dumps(error, ensure_ascii=False)
233 |   if exception:
234 |     ret += '\n' + exception + '\n'
235 |   return ret
236 |
237 | def nvtake(L: Iterable[str], infos: LilacInfos) -> None:
238 |   names: List[str] = []
239 |   for name in L:
240 |     confs = infos[name].update_on
241 |     if confs:
242 |       names += [f'{name}:{i}' for i in range(len(confs))]
243 |       names[-len(confs)] = name
244 |     else:
245 |       names.append(name)
246 |
247 |   run_cmd(['nvtake', '--ignore-nonexistent', '-c', NVCHECKER_FILE] # type: ignore
248 |           + names)
249 |   # mypy can't infer List[Union[str, Path]]
250 |   # and can't understand List[str] is a subtype of it
251 |
--------------------------------------------------------------------------------
/lilac2/packages.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from collections import defaultdict, namedtuple
4 | from pathlib import Path
5 | from typing import Dict, Union, Tuple, Set, Optional, DefaultDict
6 | import re
7 | import graphlib
8 | from contextlib import suppress
9 | import logging
10 |
11 | from .vendor import archpkg
12 |
13 | from .api import run_cmd
14 | from .typing import LilacInfos
15 | from . import lilacyaml
16 |
17 | logger = logging.getLogger(__name__)
18 |
19 | def get_dependency_map(
20 |   depman: DependencyManager, lilacinfos: LilacInfos,
21 | ) -> Tuple[Dict[str, Set[Dependency]], Dict[str, Set[Dependency]]]:
22 |   '''compute ordered, complete dependency relations between pkgbases (the directory names)
23 |
24 |   This function does not make use of pkgname because they may be the same for
25 |   different pkgdir. Those are carried by Dependency and used elsewhere.
26 |
27 |   The first returned dict has the complete set of dependencies of the given pkgbase, including
28 |   build-time dependencies of other dependencies. The second dict has only the dependencies
29 |   required to be installed in the build chroot. For example, if A depends on B, and B makedepends
30 |   on C, then the first dict has "A: {B, C}" while the second dict has only "A: {B}".
31 | ''' 32 | map: DefaultDict[str, Set[Dependency]] = defaultdict(set) 33 | pkgdir_map: DefaultDict[str, Set[str]] = defaultdict(set) 34 | rmap: DefaultDict[str, Set[str]] = defaultdict(set) 35 | 36 | # same as above maps, but contain only normal dependencies, not makedepends or checkdepends 37 | norm_map: DefaultDict[str, Set[Dependency]] = defaultdict(set) 38 | norm_pkgdir_map: DefaultDict[str, Set[str]] = defaultdict(set) 39 | norm_rmap: DefaultDict[str, Set[str]] = defaultdict(set) 40 | 41 | for pkgbase, info in lilacinfos.items(): 42 | for d in info.repo_depends: 43 | d = depman.get(d) 44 | 45 | pkgdir_map[pkgbase].add(d.pkgdir.name) 46 | rmap[d.pkgdir.name].add(pkgbase) 47 | map[pkgbase].add(d) 48 | 49 | norm_pkgdir_map[pkgbase].add(d.pkgdir.name) 50 | norm_rmap[d.pkgdir.name].add(pkgbase) 51 | norm_map[pkgbase].add(d) 52 | 53 | for d in info.repo_makedepends: 54 | d = depman.get(d) 55 | 56 | pkgdir_map[pkgbase].add(d.pkgdir.name) 57 | rmap[d.pkgdir.name].add(pkgbase) 58 | map[pkgbase].add(d) 59 | 60 | dep_order = graphlib.TopologicalSorter(pkgdir_map).static_order() 61 | for pkgbase in dep_order: 62 | if pkgbase in rmap: 63 | deps = map[pkgbase] 64 | dependers = rmap[pkgbase] 65 | for dd in dependers: 66 | map[dd].update(deps) 67 | if pkgbase in norm_rmap: 68 | deps = norm_map[pkgbase] 69 | dependers = norm_rmap[pkgbase] 70 | for dd in dependers: 71 | norm_map[dd].update(deps) 72 | 73 | build_dep_map: DefaultDict[str, Set[Dependency]] = defaultdict(set) 74 | for pkgbase, info in lilacinfos.items(): 75 | build_deps = build_dep_map[pkgbase] 76 | build_deps.update(norm_map[pkgbase]) 77 | for d in info.repo_makedepends: 78 | d = depman.get(d) 79 | build_deps.add(d) 80 | build_deps.update(norm_map[d.pkgdir.name]) 81 | 82 | return map, build_dep_map 83 | 84 | _DependencyTuple = namedtuple( 85 | '_DependencyTuple', 'pkgdir pkgname') 86 | 87 | class Dependency(_DependencyTuple): 88 | pkgdir: Path 89 | pkgname: str 90 | 91 | def resolve(self) -> Optional[Path]: 92 | try: 93 | files = [x for x in self.pkgdir.iterdir() 94 | if x.name.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))] 95 | except FileNotFoundError: 96 | return None 97 | 98 | pkgs = [] 99 | for x in files: 100 | info = archpkg.PkgNameInfo.parseFilename(x.name) 101 | if info.name == self.pkgname: 102 | pkgs.append(x) 103 | 104 | if len(pkgs) == 1: 105 | return pkgs[0] 106 | elif not pkgs: 107 | return None 108 | else: 109 | ret = max(pkgs, key=lambda x: x.stat().st_mtime) 110 | return ret 111 | 112 | class DependencyManager: 113 | _CACHE: Dict[str, Dependency] = {} 114 | 115 | def __init__(self, repodir: Path) -> None: 116 | self.repodir = repodir 117 | 118 | def get(self, what: Union[str, Tuple[str, str]]) -> Dependency: 119 | if isinstance(what, tuple): 120 | pkgbase, pkgname = what 121 | else: 122 | pkgbase = pkgname = what 123 | 124 | key = '/'.join((pkgbase, pkgname)) 125 | if key not in self._CACHE: 126 | self._CACHE[key] = Dependency( 127 | self.repodir / pkgbase, pkgname) 128 | return self._CACHE[key] 129 | 130 | def get_changed_packages(from_: str, to: str) -> Set[str]: 131 | cmd = ["git", "diff", "--name-only", '--relative', from_, to] 132 | r = run_cmd(cmd).splitlines() 133 | ret = {x.split('/', 1)[0] for x in r} 134 | return ret 135 | 136 | _re_package = re.compile(r'package(?:_(.+))?\(') 137 | 138 | def get_package_names(pkgdir: Path) -> Set[Tuple[str, str]]: 139 | packages: Set[Tuple[str, str]] = set() 140 | 141 | pkgbase = pkgdir.name 142 | 143 | pkgfile = pkgdir / 'package.list' 144 | if pkgfile.exists(): 145 | with 
open(pkgfile) as f: 146 | packages.update((pkgbase, l.rstrip()) for l in f if not l.startswith('#')) 147 | return packages 148 | 149 | found = False 150 | with suppress(FileNotFoundError), open(pkgdir / 'PKGBUILD') as f: 151 | for l in f: 152 | if m := _re_package.match(l): 153 | found = True 154 | if m.group(1): 155 | packages.add((pkgbase, m.group(1).strip())) 156 | else: 157 | packages.add((pkgbase, pkgbase)) 158 | if not found: 159 | packages.add((pkgbase, pkgbase)) 160 | return packages 161 | 162 | def get_all_pkgnames(repodir: Path) -> Set[Tuple[str, str]]: 163 | packages: Set[Tuple[str, str]] = set() 164 | for pkgdir in lilacyaml.iter_pkgdir(repodir): 165 | packages.update(get_package_names(pkgdir)) 166 | return packages 167 | 168 | def get_built_package_files(pkgdir: Path) -> list[Path]: 169 | names = [x[1] for x in get_package_names(pkgdir)] 170 | names += [x + '-debug' for x in names] 171 | ret = [] 172 | for file in pkgdir.iterdir(): 173 | if file.name.endswith(('.pkg.tar.xz', '.pkg.tar.zst')): 174 | try: 175 | info = archpkg.PkgNameInfo.parseFilename(file.name) 176 | except TypeError: 177 | logger.warning('unrecognized package file: %r', file) 178 | continue 179 | if info.name in names: 180 | ret.append(file) 181 | return ret 182 | -------------------------------------------------------------------------------- /lilac2/pkgbuild.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | # PKGBUILD related stuff that lilac uses (excluding APIs) 4 | 5 | import os 6 | import time 7 | import subprocess 8 | from typing import Dict, List, Optional, Union 9 | from pathlib import Path 10 | from contextlib import suppress 11 | 12 | import pyalpm 13 | 14 | from .vendor.myutils import safe_overwrite 15 | 16 | from .const import _G, OFFICIAL_REPOS 17 | from .cmd import UNTRUSTED_PREFIX 18 | from .typing import PkgVers 19 | 20 | _official_packages: Dict[str, int] = {} 21 | _official_groups: Dict[str, int] = {} 22 | _repo_package_versions: Dict[str, str] = {} 23 | 24 | class ConflictWithOfficialError(Exception): 25 | def __init__(self, groups, packages): 26 | self.groups = groups 27 | self.packages = packages 28 | 29 | class DowngradingError(Exception): 30 | def __init__(self, pkgname, built_version, repo_version): 31 | self.pkgname = pkgname 32 | self.built_version = built_version 33 | self.repo_version = repo_version 34 | 35 | def _load_timed_dict( 36 | path: os.PathLike, deadline: int, 37 | ) -> Dict[str, int]: 38 | data = {} 39 | with suppress(FileNotFoundError), open(path) as f: 40 | for line in f: 41 | name, t_str = line.split(None, 1) 42 | t = int(t_str) 43 | if t >= deadline: 44 | data[name] = t 45 | 46 | return data 47 | 48 | def _save_timed_dict( 49 | path: os.PathLike, data: Dict[str, int], 50 | ) -> None: 51 | data_str = ''.join(f'{k} {v}\n' for k, v in data.items()) 52 | safe_overwrite(str(path), data_str, mode='w') 53 | 54 | def update_pacmandb(dbpath: Path, pacman_conf: Optional[str] = None, 55 | *, quiet: bool = False) -> None: 56 | stdout = subprocess.DEVNULL if quiet else None 57 | 58 | for update_arg in ['-Sy', '-Fy']: 59 | 60 | cmd: List[Union[str, Path]] = [ 61 | 'fakeroot', 'pacman', update_arg, '--dbpath', dbpath, 62 | '--disable-sandbox', # unusable without root 63 | ] 64 | if pacman_conf is not None: 65 | cmd += ['--config', pacman_conf] 66 | 67 | for _ in range(3): 68 | p = subprocess.run(cmd, stdout = stdout) 69 | if p.returncode == 0: 70 | break 71 | else: 72 | p.check_returncode() 73 | 74 | 
def update_data(dbpath: Path, pacman_conf: Optional[str], 75 | *, quiet: bool = False) -> None: 76 | update_pacmandb(dbpath, pacman_conf, quiet=quiet) 77 | 78 | now = int(time.time()) 79 | deadline = now - 90 * 86400 80 | pkgs = _load_timed_dict(dbpath / 'packages.txt', deadline) 81 | groups = _load_timed_dict(dbpath / 'groups.txt', deadline) 82 | 83 | H = pyalpm.Handle('/', str(dbpath)) 84 | for repo in OFFICIAL_REPOS: 85 | db = H.register_syncdb(repo, 0) 86 | pkgs.update((p.name, now) for p in db.pkgcache) 87 | groups.update((g[0], now) for g in db.grpcache) 88 | 89 | _save_timed_dict(dbpath / 'packages.txt', pkgs) 90 | _save_timed_dict(dbpath / 'groups.txt', groups) 91 | 92 | def load_data(dbpath: Path) -> None: 93 | global _repo_package_versions 94 | 95 | now = int(time.time()) 96 | deadline = now - 90 * 86400 97 | _official_packages.update( 98 | _load_timed_dict(dbpath / 'packages.txt', deadline)) 99 | _official_groups.update( 100 | _load_timed_dict(dbpath / 'groups.txt', deadline)) 101 | 102 | if hasattr(_G, 'repo'): 103 | H = pyalpm.Handle('/', str(dbpath)) 104 | db = H.register_syncdb(_G.repo.name, 0) 105 | _repo_package_versions = {p.name: p.version for p in db.pkgcache} 106 | 107 | def check_srcinfo() -> PkgVers: 108 | srcinfo = get_srcinfo().decode('utf-8').splitlines() 109 | bad_groups = [] 110 | bad_packages = [] 111 | pkgnames = [] 112 | 113 | for line in srcinfo: 114 | line = line.strip() 115 | if line.startswith('groups = '): 116 | g = line.split()[-1] 117 | if g in _official_groups: 118 | bad_groups.append(g) 119 | elif line.startswith('replaces = '): 120 | pkg = line.split()[-1] 121 | if pkg in _official_packages: 122 | bad_packages.append(pkg) 123 | elif line.startswith('pkgname = '): 124 | pkgnames.append(line.split()[-1]) 125 | 126 | pkgvers = _get_package_version(srcinfo) 127 | 128 | # check if the newly built package is older than the existing 129 | # package in repos or not 130 | built_version = str(pkgvers) 131 | for pkgname in pkgnames: 132 | try: 133 | repo_version = _repo_package_versions[pkgname] 134 | if pyalpm.vercmp(built_version, repo_version) < 0: 135 | raise DowngradingError(pkgname, built_version, repo_version) 136 | except KeyError: 137 | # the newly built package is not in repos yet - fine 138 | pass 139 | 140 | if bad_groups or bad_packages: 141 | raise ConflictWithOfficialError(bad_groups, bad_packages) 142 | 143 | return pkgvers 144 | 145 | def get_srcinfo() -> bytes: 146 | pwd = os.getcwd() 147 | basename = os.path.basename(pwd) 148 | # makepkg wants *.install file and write permissions to simply print out info :-( 149 | extra_binds = ['--bind', pwd, f'/tmp/{basename}', '--chdir', f'/tmp/{basename}'] 150 | out = subprocess.check_output( 151 | UNTRUSTED_PREFIX + extra_binds + ['makepkg', '--printsrcinfo'], # type: ignore 152 | ) 153 | return out 154 | 155 | def _get_package_version(srcinfo: List[str]) -> PkgVers: 156 | epoch = pkgver = pkgrel = None 157 | 158 | for line in srcinfo: 159 | line = line.strip() 160 | if not epoch and line.startswith('epoch = '): 161 | epoch = line.split()[-1] 162 | elif not pkgver and line.startswith('pkgver = '): 163 | pkgver = line.split()[-1] 164 | elif not pkgrel and line.startswith('pkgrel = '): 165 | pkgrel = line.split()[-1] 166 | 167 | assert pkgver is not None 168 | assert pkgrel is not None 169 | return PkgVers(epoch, pkgver, pkgrel) 170 | -------------------------------------------------------------------------------- /lilac2/py.typed: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/archlinuxcn/lilac/e7df53c8c7933a1457fce4f3cfd448b5fbdc4371/lilac2/py.typed -------------------------------------------------------------------------------- /lilac2/pypi2pkgbuild.py: -------------------------------------------------------------------------------- 1 | import json 2 | import urllib.request 3 | from typing import ( 4 | Dict, Optional, Iterable, List, Any, Tuple, 5 | ) 6 | 7 | template = '''\ 8 | _name={name} 9 | pkgname={pkgname} 10 | pkgver={pkgver} 11 | pkgrel=1 12 | pkgdesc="{summary}" 13 | arch=({arch}) 14 | url="{home_page}" 15 | license=({license}) 16 | {depends} 17 | {conflicts}{provides}{source} 18 | sha256sums=('{sha256sum}') 19 | {prepare} 20 | build() {{ 21 | {build} 22 | }} 23 | 24 | package() {{ 25 | {package} 26 | 27 | # make sure we don't install any world-writable or root-readable-only files 28 | # we shouldn't need to fix ownership as we extract tarballs as a non-root user 29 | # https://github.com/pypa/setuptools/issues/1328 30 | # https://github.com/LonamiWebs/Telethon/issues/1605 31 | chmod u=rwX,go=rX -R "$pkgdir" 32 | # make sure we don't install annoying files 33 | local _site_packages=$(python -c "import site; print(site.getsitepackages()[0])") 34 | rm -rf "$pkgdir/$_site_packages/tests/" 35 | }} 36 | {check} 37 | ''' 38 | 39 | pkg_license_tmpl = '''\ 40 | install -Dm644 {license_file} "$pkgdir/usr/share/licenses/$pkgname/LICENSE" 41 | ''' 42 | 43 | pkg_whl_tmpl = '''\ 44 | cd "$srcdir" 45 | python -m installer --destdir="$pkgdir" '{whl}' 46 | ''' 47 | 48 | class PyPIException(Exception): pass 49 | 50 | def to_sharray(arr: Iterable[str]) -> str: 51 | return ' '.join(f"'{x}'" for x in arr) 52 | 53 | def get_pypi_info(name: str) -> Dict[str, Any]: 54 | url = f'https://pypi.org/pypi/{name}/json' 55 | res = urllib.request.urlopen(url) 56 | data = res.read().decode('utf-8') 57 | j = json.loads(data) 58 | return j 59 | 60 | def gen_pkgbuild( 61 | pypi_name: str, 62 | pkgname: Optional[str] = None, 63 | depends: Optional[List[str]] = None, 64 | python2: bool = False, 65 | arch: Optional[Iterable[str]] = None, 66 | makedepends: Optional[List[str]] = None, 67 | optdepends: Optional[List[str]] = None, 68 | depends_setuptools: bool = False, 69 | check: Optional[str] = None, 70 | provides: Optional[Iterable[str]] = None, 71 | conflicts: Optional[Iterable[str]] = None, 72 | license: Optional[str] = None, 73 | license_file: Optional[str] = None, 74 | prepare: Optional[str] = None, 75 | pep517: bool = False, 76 | ) -> Tuple[str, str]: 77 | j = get_pypi_info(pypi_name) 78 | version = j['info']['version'] 79 | 80 | source_release: List[Dict[str, Any]] = [] 81 | whl_release: List[Dict[str, Any]] = [] 82 | source_release = [ 83 | x for x in j['releases'][version] 84 | if x['packagetype'] == 'sdist'] 85 | if not source_release: 86 | whl_release = [ 87 | x for x in j['releases'][version] 88 | if x['packagetype'] == 'bdist_wheel'] 89 | if not whl_release: 90 | raise PyPIException('no release of known type') 91 | 92 | if not source_release and license_file: 93 | raise PyPIException('no source code available so cannot install license_file') 94 | 95 | makedepends2 = makedepends or [] 96 | if whl_release: 97 | makedepends2.extend(['python-installer']) 98 | elif pep517: 99 | makedepends2.extend(['python-build', 'python-installer']) 100 | else: 101 | makedepends2.append('python-setuptools') 102 | 103 | depends2 = depends or ['python'] 104 | if depends_setuptools: 105 
| depends2.append('python-setuptools') 106 | 107 | depends_str = [] 108 | if depends2: 109 | depends_str.append(f'depends=({to_sharray(depends2)})') 110 | if makedepends2: 111 | depends_str.append( 112 | f'makedepends=({to_sharray(makedepends2)})') 113 | if optdepends: 114 | depends_str.append( 115 | f'optdepends=({to_sharray(optdepends)})') 116 | 117 | src_dir = '$_name-$pkgver' 118 | 119 | if source_release: 120 | r = source_release[-1] 121 | filename = r['filename'] 122 | if filename.endswith('.zip'): 123 | src_dir = filename.removesuffix('.zip') 124 | else: 125 | src_dir = filename.rsplit('.tar', 1)[0] 126 | # filename name may be different from pypi name, e.g. django-post-office 127 | # Use "predictable" URL instead of `r['url']` to make AUR users happy... 128 | source_line = 'source=("https://files.pythonhosted.org/packages/source/${_name::1}/${_name}/%s")' % filename 129 | 130 | if pep517: 131 | build_code = f'''\ 132 | cd "$srcdir/{src_dir}" 133 | python -m build --wheel --no-isolation 134 | ''' 135 | package_code = f'''\ 136 | cd "$srcdir/{src_dir}" 137 | python -m installer --destdir="$pkgdir" dist/*.whl 138 | ''' 139 | else: 140 | build_code = f'''\ 141 | cd "$srcdir/{src_dir}" 142 | python3 setup.py build 143 | ''' 144 | package_code = f'''\ 145 | cd "$srcdir/{src_dir}" 146 | python3 setup.py install --root=$pkgdir --optimize=1 --skip-build 147 | ''' 148 | 149 | if license_file: 150 | package_code += pkg_license_tmpl.format( 151 | license_file = license_file) 152 | 153 | elif whl_release: 154 | r = whl_release[-1] 155 | whl_pyver = r['python_version'] 156 | whl = r['url'].rsplit('/')[-1] 157 | source_line = f'source=("https://files.pythonhosted.org/packages/{whl_pyver}/${{_name::1}}/$_name/{whl}")' 158 | build_code = ' true' 159 | package_code = pkg_whl_tmpl.format(whl=whl) 160 | 161 | if check is not None: 162 | if check == 'nose': 163 | depends_str.append("checkdepends=('python-nose')") 164 | check_code = f''' 165 | check() {{ 166 | cd "$srcdir/{src_dir}" 167 | python -m unittest discover tests 168 | }}''' 169 | else: 170 | raise ValueError('unrecognized check value', check) 171 | else: 172 | check_code = '' 173 | 174 | if prepare is not None: 175 | prepare_code = f''' 176 | prepare() {{ 177 | cd "$srcdir/{src_dir}" 178 | {prepare} 179 | }} 180 | ''' 181 | else: 182 | prepare_code = '' 183 | 184 | vars1 = { 185 | 'name': j['info']['name'], 186 | 'pkgname': pkgname or f'python-{pypi_name.lower()}', 187 | 'pkgver': version, 188 | 'summary': j['info']['summary'], 189 | 'arch': to_sharray(arch) if arch else 'any', 190 | 'home_page': j['info']['home_page'] or j['info']['project_urls'].get('Homepage', ''), 191 | 'license': license or "'%s'" % j['info']['license'].replace("'", ''), 192 | 'depends': '\n'.join(depends_str), 193 | 'provides': f'provides=({to_sharray(provides)})\n' if provides else '', 194 | 'conflicts': f'conflicts=({to_sharray(conflicts)})\n' if conflicts else '', 195 | 'source': source_line, 196 | 'sha256sum': r['digests']['sha256'], 197 | 'build': build_code.rstrip(), 198 | 'package': package_code.rstrip(), 199 | 'check': check_code.rstrip(), 200 | 'prepare': prepare_code, 201 | } 202 | 203 | pkgbuild = template.format_map(vars1) 204 | return version, pkgbuild 205 | 206 | -------------------------------------------------------------------------------- /lilac2/repo.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import subprocess 5 | from pathlib import Path 6 | from 
typing import ( 7 | Optional, Tuple, List, Union, Dict, TYPE_CHECKING, Any, 8 | ) 9 | import logging 10 | from functools import lru_cache 11 | import traceback 12 | import string 13 | import time 14 | from contextlib import suppress 15 | 16 | import structlog 17 | 18 | from .vendor.github import GitHub 19 | 20 | from .mail import MailService 21 | from .tools import ansi_escape_re 22 | from . import api, lilacyaml, intl 23 | from .typing import LilacMod, Maintainer, LilacInfos, LilacInfo 24 | from .nomypy import BuildResult # type: ignore 25 | if TYPE_CHECKING: 26 | from .packages import Dependency 27 | del Dependency 28 | 29 | logger = logging.getLogger(__name__) 30 | build_logger_old = logging.getLogger('build') 31 | build_logger = structlog.get_logger(logger_name='build') 32 | 33 | class Repo: 34 | gh: Optional[GitHub] 35 | 36 | def __init__(self, config: dict[str, Any]) -> None: 37 | self.myaddress = config['lilac']['email'] 38 | self.mymaster = config['lilac']['master'] 39 | self.logurl_template = config['lilac'].get('logurl') 40 | self.repomail = config['repository']['email'] 41 | self.name = config['repository']['name'] 42 | self.trim_ansi_codes = not config['smtp'].get('use_ansi', False) 43 | self.commit_msg_prefix = config['lilac'].get('commit_msg_prefix', '') 44 | 45 | self.repodir = Path(config['repository']['repodir']).expanduser() 46 | self.bindmounts = self._get_bindmounts(config.get('bindmounts')) 47 | self.tmpfs = config.get('misc', {}).get('tmpfs', []) 48 | 49 | self.ms = MailService(config) 50 | github_token = config['lilac'].get('github_token') 51 | if github_token: 52 | self.gh = GitHub(github_token) 53 | else: 54 | self.gh = None 55 | 56 | self.on_built_cmds = config.get('misc', {}).get('postbuild', []) 57 | 58 | self.lilacinfos: LilacInfos = {} # to be filled by self.load_all_lilac_and_report() 59 | self.yamls: dict[str, Any] = {} 60 | self._maint_cache: dict[str, list[Maintainer]] = {} 61 | 62 | @lru_cache() 63 | def maintainer_from_github(self, username: str) -> Optional[Maintainer]: 64 | if self.gh is None: 65 | l10n = intl.get_l10n('mail') 66 | msg = l10n.format_value('github-token-not-set') 67 | raise ValueError(msg) 68 | 69 | userinfo = self.gh.get_user_info(username) 70 | if userinfo['email']: 71 | return Maintainer(userinfo['name'] or username, userinfo['email'], username) 72 | else: 73 | return None 74 | 75 | def parse_maintainers( 76 | self, 77 | ms: List[Dict[str, str]], 78 | ) -> Tuple[List[Maintainer], List[str]]: 79 | ret = [] 80 | errors = [] 81 | 82 | l10n = intl.get_l10n('mail') 83 | for m in ms: 84 | if 'github' in m and 'email' in m: 85 | ret.append( 86 | Maintainer.from_email_address(m['email'], m['github']) 87 | ) 88 | elif 'github' in m: 89 | try: 90 | u = self.maintainer_from_github(m['github']) 91 | except Exception as e: 92 | msg = l10n.format_value('github-email-error', {'error': repr(e)}) 93 | errors.append(msg) 94 | else: 95 | if u is None: 96 | msg = l10n.format_value('github-email-private', {'user': m['github']}) 97 | errors.append(msg) 98 | else: 99 | ret.append(u) 100 | else: 101 | logger.error('unsupported maintainer info: %r', m) 102 | msg = l10n.format_value('unsupported-maintainer-info', {'info': repr(m)}) 103 | errors.append(msg) 104 | continue 105 | 106 | return ret, errors 107 | 108 | def find_dependents( 109 | self, pkgbase: str, 110 | ) -> List[str]: 111 | if self.lilacinfos: 112 | return self._find_dependents_heavy(pkgbase) 113 | else: 114 | return self._find_dependents_lite(pkgbase) 115 | 116 | def _find_dependents_heavy( 117 
| self, pkgbase: str, 118 | ) -> List[str]: 119 | '''find_dependents for main process''' 120 | ret = [] 121 | 122 | for info in self.lilacinfos.values(): 123 | ds = info.repo_depends 124 | if any(x == pkgbase for x, y in ds): 125 | ret.append(info.pkgbase) 126 | 127 | return ret 128 | 129 | def _find_dependents_lite( 130 | self, pkgbase: str, 131 | ) -> List[str]: 132 | '''find_dependents for worker process''' 133 | ret = [] 134 | self._load_yamls_ignore_errors() 135 | 136 | for p, yamlconf in self.yamls.items(): 137 | ds = yamlconf.get('repo_depends', ()) 138 | if any(x == pkgbase for x, y in ds): 139 | ret.append(p) 140 | 141 | return ret 142 | 143 | def _load_yamls_ignore_errors(self) -> None: 144 | if self.yamls: 145 | return 146 | 147 | for dir in lilacyaml.iter_pkgdir(self.repodir): 148 | try: 149 | yamlconf = lilacyaml.load_lilac_yaml(dir) 150 | except Exception: 151 | pass 152 | else: 153 | self.yamls[dir.name] = yamlconf 154 | 155 | def find_maintainers( 156 | self, mod: Union[LilacInfo, LilacMod], 157 | fallback_git: bool = True, 158 | ) -> List[Maintainer]: 159 | if mod.pkgbase not in self._maint_cache: 160 | mts = self._find_maintainers_impl( 161 | mod.pkgbase, 162 | maintainers = getattr(mod, 'maintainers', None), 163 | fallback_git = fallback_git, 164 | ) 165 | self._maint_cache[mod.pkgbase] = mts 166 | return self._maint_cache[mod.pkgbase] 167 | 168 | def _find_maintainers_impl( 169 | self, 170 | pkgbase: str, 171 | maintainers: Optional[List[Dict[str, str]]], 172 | fallback_git: bool = True, 173 | ) -> List[Maintainer]: 174 | ret: List[Maintainer] = [] 175 | errors: List[str] = [] 176 | 177 | if maintainers is not None: 178 | if maintainers: 179 | ret, errors = self.parse_maintainers(maintainers) 180 | else: 181 | dependents = self.find_dependents(pkgbase) 182 | for pkg in dependents: 183 | if self.lilacinfos: 184 | maintainers = self.lilacinfos[pkg].maintainers 185 | else: 186 | maintainers = self.yamls[pkg].get('maintainers') 187 | dmaints = self._find_maintainers_impl( 188 | pkg, maintainers, fallback_git=False, 189 | ) 190 | ret.extend(dmaints) 191 | 192 | if (not ret and fallback_git) or errors: 193 | # fallback to git 194 | dir = self.repodir / pkgbase 195 | git_maintainer = self.find_maintainer_by_git(dir) 196 | 197 | if errors: 198 | error_str = '\n'.join(errors) 199 | l10n = intl.get_l10n('mail') 200 | self.sendmail( 201 | git_maintainer, 202 | subject = l10n.format_value('maintainers-error-subject', {'pkg': pkgbase}), 203 | msg = l10n.format_value('maintainers-error-body') + f'\n\n{error_str}\n', 204 | ) 205 | 206 | if not ret and fallback_git: 207 | logger.warning("lilac doesn't give out maintainers for %s, " 208 | "fallback to git.", pkgbase) 209 | return [git_maintainer] 210 | else: 211 | return ret 212 | 213 | def find_maintainer_by_git( 214 | self, 215 | dir: Path = Path('.'), 216 | file: str = '*', 217 | ) -> Maintainer: 218 | 219 | me = self.myaddress 220 | 221 | cmd = [ 222 | "git", "log", "--format=%H %an <%ae>", "--", file, 223 | ] 224 | p = subprocess.Popen( 225 | cmd, stdout=subprocess.PIPE, universal_newlines=True, 226 | cwd = dir, 227 | ) 228 | 229 | try: 230 | stdout = p.stdout 231 | assert stdout 232 | while True: 233 | line = stdout.readline() 234 | if not line: 235 | logger.error('history exhausted while finding maintainer, stop.') 236 | raise Exception('maintainer cannot be found') 237 | commit, author = line.rstrip().split(None, 1) 238 | if me not in author: 239 | return Maintainer.from_email_address(author) 240 | finally: 241 | p.terminate() 
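  # Note: `git log` output is streamed; we return as soon as an author other
  # than lilac itself is found, and the finally clause terminates the
  # still-running git process.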
242 | 243 | def report_error(self, subject: str, msg: str) -> None: 244 | self.ms.sendmail(self.mymaster, subject, msg) 245 | 246 | def send_error_report( 247 | self, 248 | mod: Union[LilacInfo, LilacMod, str], *, 249 | msg: Optional[str] = None, 250 | exc: Optional[Exception] = None, 251 | subject: Optional[str] = None, 252 | logfile: Optional[Path] = None, 253 | ) -> None: 254 | ''' 255 | the mod argument can be a LilacInfo, or LilacMod (for worker), or a str in case the module cannot be loaded, 256 | in that case we use git to find a maintainer. 257 | ''' 258 | if msg is None and exc is None: 259 | raise TypeError('send_error_report received insufficient args') 260 | 261 | if isinstance(mod, str): 262 | maintainers = [self.find_maintainer_by_git(file=mod)] 263 | pkgbase = mod 264 | else: 265 | maintainers = self.find_maintainers(mod) 266 | pkgbase = mod.pkgbase 267 | 268 | msgs = [] 269 | if msg is not None: 270 | msgs.append(msg) 271 | 272 | l10n = intl.get_l10n('mail') 273 | 274 | if exc is not None: 275 | tb = ''.join(traceback.format_exception(type(exc), exc, exc.__traceback__)) 276 | if isinstance(exc, subprocess.CalledProcessError): 277 | subject_real = subject or l10n.format_value('packaging-error-subprocess-subject') 278 | msg1 = l10n.format_value('packaging-error-subprocess', { 279 | 'cmd': repr(exc.cmd), 280 | 'returncode': exc.returncode, 281 | }) 282 | msgs.append(msg1) 283 | if exc.output: 284 | msg1 = l10n.format_value('packaging-error-subprocess-output') 285 | msgs.append(msg1 + '\n\n' + exc.output) 286 | msg1 = l10n.format_value('packaging-error-traceback') 287 | msgs.append(msg1 + '\n\n' + tb) 288 | elif isinstance(exc, api.AurDownloadError): 289 | subject_real = subject or l10n.format_value('packaging-error-aur-subject') 290 | msg1 = l10n.format_value('packaging-error-aur') 291 | msgs.append(msg1 + '\n\n') 292 | msg1 = l10n.format_value('packaging-error-traceback') 293 | msgs.append(msg1 + '\n\n' + tb) 294 | elif isinstance(exc, TimeoutError): 295 | subject_real = subject or l10n.format_value('packaging-error-timeout-subject') 296 | else: 297 | subject_real = subject or l10n.format_value('packaging-error-unknown-subject') 298 | msg1 = l10n.format_value('packaging-error-unknown') 299 | msgs.append(msg1 + '\n\n' + tb) 300 | else: 301 | if subject is None: 302 | raise ValueError('subject should be given but not') 303 | subject_real = subject 304 | 305 | if '%s' in subject_real: 306 | subject_real = subject_real % pkgbase 307 | 308 | if logfile: 309 | with suppress(FileNotFoundError): 310 | # we need to replace error characters because the mail will be 311 | # strictly encoded, disallowing surrogate pairs 312 | with logfile.open(errors='replace') as f: 313 | build_output = f.read() 314 | if build_output: 315 | log_header = l10n.format_value('packaging-log') 316 | with suppress(ValueError, KeyError): # invalid template or wrong key 317 | if self.logurl_template and len(logfile.parts) >= 2: 318 | # assume the directory name is the time stamp for now. 
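        # the logurl template may reference $datetime (the log directory
        # name), $timestamp and $pkgbase; invalid templates or unknown
        # keys are swallowed by the suppress() above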
319 | logurl = string.Template(self.logurl_template).substitute( 320 | datetime = logfile.parts[-2], 321 | timestamp = int(time.time()), 322 | pkgbase = pkgbase, 323 | ) 324 | log_header += ' ' + logurl 325 | msgs.append(log_header) 326 | msgs.append('\n' + build_output) 327 | 328 | msg = '\n'.join(msgs) 329 | if self.trim_ansi_codes: 330 | msg = ansi_escape_re.sub('', msg) 331 | 332 | addresses = [str(x) for x in maintainers] 333 | logger.debug('mail to %s:\nsubject: %s\nbody: %s', 334 | addresses, subject_real, msg[:200]) 335 | self.sendmail(addresses, subject_real, msg) 336 | 337 | def sendmail(self, who: Union[str, List[str], Maintainer], 338 | subject: str, msg: str) -> None: 339 | if isinstance(who, Maintainer): 340 | who = str(who) 341 | self.ms.sendmail(who, subject, msg) 342 | 343 | def send_repo_mail(self, subject: str, msg: str) -> None: 344 | self.ms.sendmail(self.repomail, subject, msg) 345 | 346 | def manages(self, dep: Dependency) -> bool: 347 | return dep.pkgdir.name in self.lilacinfos 348 | 349 | def load_managed_lilac_and_report(self) -> dict[str, tuple[str, ...]]: 350 | self.lilacinfos, errors = lilacyaml.load_managed_lilacinfos(self.repodir) 351 | failed: dict[str, tuple[str, ...]] = {p: () for p in errors} 352 | l10n = intl.get_l10n('mail') 353 | for name, exc_info in errors.items(): 354 | logger.error('error while loading lilac.yaml for %s', name, exc_info=exc_info) 355 | exc = exc_info[1] 356 | if not isinstance(exc, Exception): 357 | raise 358 | self.send_error_report(name, exc=exc, 359 | subject=l10n.format_value('lilac-yaml-loadding-error')) 360 | build_logger_old.error('%s failed', name) 361 | build_logger.exception('lilac.yaml error', pkgbase = name, exc_info=exc_info) 362 | 363 | return failed 364 | 365 | def on_built(self, pkg: str, result: BuildResult, version: Optional[str]) -> None: 366 | if not self.on_built_cmds: 367 | return 368 | 369 | env = os.environ.copy() 370 | env['PKGBASE'] = pkg 371 | env['RESULT'] = result.__class__.__name__ 372 | env['VERSION'] = version or '' 373 | for cmd in self.on_built_cmds: 374 | try: 375 | subprocess.check_call(cmd, env=env) 376 | except Exception: 377 | logger.exception('postbuild cmd error for %r', cmd) 378 | 379 | def _get_bindmounts( 380 | self, bindmounts: Optional[dict[str, str]], 381 | ) -> list[str]: 382 | if bindmounts is None: 383 | return [] 384 | 385 | items = [(os.path.expanduser(src), dst) 386 | for src, dst in bindmounts.items()] 387 | items.sort(reverse=True) 388 | return [f'{src}:{dst}' for src, dst in items] 389 | -------------------------------------------------------------------------------- /lilac2/slogconf.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import time 4 | 5 | import structlog 6 | from structlog.types import WrappedLogger, EventDict 7 | 8 | def exc_info( 9 | logger: WrappedLogger, level: str, event: EventDict, 10 | ) -> EventDict: 11 | if level == 'exception' and 'exc_info' not in event: 12 | event['exc_info'] = True 13 | return event 14 | 15 | _renderer = structlog.processors.JSONRenderer( 16 | ensure_ascii=False) 17 | 18 | def json_renderer(logger: WrappedLogger, level: str, event: EventDict) -> str | bytes: 19 | event['level'] = level 20 | return _renderer(logger, level, event) 21 | 22 | def add_timestamp( 23 | logger: WrappedLogger, level: str, event: EventDict, 24 | ) -> EventDict: 25 | event['ts'] = time.time() 26 | return event 27 | 28 | 
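A minimal sketch of how these processors could be chained (illustrative only;
lilac's actual logging setup is not part of this section, and 'demo' /
'some-pkg' are placeholder values):

    import structlog
    from lilac2 import slogconf

    structlog.configure(
      processors = [
        slogconf.exc_info,        # turn level == 'exception' into exc_info=True
        slogconf.add_timestamp,   # stamp each event with time.time() as 'ts'
        structlog.processors.format_exc_info,  # render exc_info into 'exception'
        slogconf.json_renderer,   # add 'level' and emit one JSON object per event
      ],
      logger_factory = structlog.PrintLoggerFactory(),
    )
    structlog.get_logger(logger_name='build').info('demo', pkgbase='some-pkg')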
-------------------------------------------------------------------------------- /lilac2/systemd.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | from typing import Generator, Any, Optional 4 | import select 5 | import time 6 | import logging 7 | import threading 8 | 9 | from .typing import Cmd, RUsage 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | _available = None 14 | _check_lock = threading.Lock() 15 | 16 | def available() -> bool | dict[str, bool]: 17 | global _available 18 | 19 | with _check_lock: 20 | if _available is None: 21 | _available = _check_availability() 22 | logger.debug('systemd availability: %s', _available) 23 | return _available 24 | 25 | def _cgroup_memory_usage(cgroup: str) -> int: 26 | mem_file = f'/sys/fs/cgroup{cgroup}/memory.peak' 27 | with open(mem_file) as f: 28 | return int(f.read().rstrip()) 29 | 30 | def _cgroup_cpu_usage(cgroup: str) -> int: 31 | cpu_file = f'/sys/fs/cgroup{cgroup}/cpu.stat' 32 | with open(cpu_file) as f: 33 | for l in f: 34 | if l.startswith('usage_usec '): 35 | return int(l.split()[1]) * 1000 36 | return 0 37 | 38 | def _check_availability() -> bool | dict[str, bool]: 39 | if 'DBUS_SESSION_BUS_ADDRESS' not in os.environ: 40 | dbus = f'/run/user/{os.getuid()}/bus' 41 | if not os.path.exists(dbus): 42 | return False 43 | os.environ['DBUS_SESSION_BUS_ADDRESS'] = f'unix:path={dbus}' 44 | p = subprocess.run([ 45 | 'systemd-run', '--quiet', '--user', 46 | '--remain-after-exit', '-u', 'lilac-check', 'true', 47 | ]) 48 | if p.returncode != 0: 49 | return False 50 | 51 | try: 52 | while True: 53 | ps: dict[str, Optional[int]] = { 54 | 'CPUUsageNSec': None, 55 | 'MemoryPeak': None, 56 | 'MainPID': None, 57 | } 58 | _read_service_int_properties('lilac-check', ps) 59 | if ps['MainPID'] != 0: 60 | time.sleep(0.01) 61 | continue 62 | 63 | ret = {} 64 | for k, v in ps.items(): 65 | ret[k] = v is not None 66 | 67 | return ret 68 | finally: 69 | subprocess.run(['systemctl', '--user', 'stop', '--quiet', 'lilac-check']) 70 | 71 | def _read_service_int_properties(name: str, properties: dict[str, Optional[int]]) -> None: 72 | cmd = [ 73 | 'systemctl', '--user', 'show', f'{name}.service', 74 | ] + [f'--property={k}' for k in properties] 75 | 76 | out = subprocess.check_output(cmd, text=True) 77 | for l in out.splitlines(): 78 | k, v = l.split('=', 1) 79 | if k in properties: 80 | try: 81 | properties[k] = int(v) 82 | except ValueError: 83 | # [not set] 84 | pass 85 | 86 | def start_cmd( 87 | name: str, cmd: Cmd, 88 | setenv: dict[str, str] = {}, 89 | **kwargs: Any, # can't use P.kwargs here because there is no place for P.args 90 | ) -> subprocess.Popen: 91 | # don't use --collect here because it will be immediately collected when 92 | # failed 93 | cmd_s: Cmd = [ 94 | 'systemd-run', '--pipe', '--quiet', '--user', 95 | '--wait', '--remain-after-exit', '-u', name, 96 | '-p', 'CPUWeight=100', '-p', 'KillMode=process', 97 | '-p', 'KillSignal=INT', 98 | ] 99 | 100 | if cwd := kwargs.pop('cwd', None): 101 | cmd_s += [f'--working-directory={str(cwd)}'] # type: ignore 102 | 103 | cmd_setenv = [f'--setenv={k}={v}' for k, v in setenv.items()] 104 | cmd_s = cmd_s + cmd_setenv + ['--'] + cmd # type: ignore 105 | logger.debug('running %s', subprocess.list2cmdline(cmd_s)) 106 | return subprocess.Popen(cmd_s, **kwargs) 107 | 108 | def _get_service_info(name: str) -> tuple[int, str, str]: 109 | '''return pid and control group path''' 110 | out = subprocess.check_output([ 111 | 'systemctl', 
'--user', 'show', f'{name}.service', 112 | '--property=MainPID', 113 | '--property=ControlGroup', 114 | '--property=SubState', 115 | ], text=True) 116 | pid = 0 117 | cgroup = '' 118 | state = '' 119 | for l in out.splitlines(): 120 | k, v = l.split('=', 1) 121 | if k == 'MainPID': 122 | pid = int(v) 123 | elif k == 'ControlGroup': 124 | cgroup = v 125 | elif k == 'SubState': 126 | state = v 127 | return pid, cgroup, state 128 | 129 | def _poll_cmd(pid: int) -> Generator[None, None, None]: 130 | try: 131 | pidfd = os.pidfd_open(pid) 132 | except OSError as e: 133 | if e.errno == 22: 134 | return 135 | raise 136 | 137 | poll = select.poll() 138 | poll.register(pidfd, select.POLLIN) 139 | 140 | try: 141 | while True: 142 | ret = poll.poll(1_000) 143 | if ret: 144 | logger.debug('worker exited') 145 | return 146 | yield 147 | finally: 148 | os.close(pidfd) 149 | 150 | def poll_rusage(name: str, deadline: float) -> tuple[RUsage, bool]: 151 | timedout = False 152 | done_state = ['exited', 'failed'] 153 | 154 | try: 155 | cgroup = '' 156 | time_start = time.monotonic() 157 | while True: 158 | pid, cgroup, state = _get_service_info(name) 159 | if (not pid or not cgroup) and state not in done_state: 160 | if time.monotonic() - time_start > 60: 161 | logger.error('%s.service not started in 60s, giving up.', name) 162 | raise Exception('systemd error: service not started in 60s') 163 | logger.debug('%s.service state: %s, waiting', name, state) 164 | time.sleep(0.1) 165 | else: 166 | break 167 | 168 | if state in done_state: 169 | logger.warning('%s.service already finished: %s', name, state) 170 | return RUsage(0, 0), False 171 | 172 | nsec = 0 173 | mem_max = 0 174 | availability = available() 175 | assert isinstance(availability, dict) 176 | for _ in _poll_cmd(pid): 177 | if not availability['CPUUsageNSec']: 178 | nsec = _cgroup_cpu_usage(cgroup) 179 | if not availability['MemoryPeak']: 180 | mem_max = _cgroup_memory_usage(cgroup) 181 | if time.time() > deadline: 182 | timedout = True 183 | break 184 | 185 | # systemd will remove the cgroup as soon as the process exits 186 | # instead of racing with systemd, we just ask it for the data 187 | ps: dict[str, Optional[int]] = { 188 | 'CPUUsageNSec': None, 189 | 'MemoryPeak': None, 190 | } 191 | _read_service_int_properties(name, ps) 192 | if n := ps['CPUUsageNSec']: 193 | nsec = n 194 | if n := ps['MemoryPeak']: 195 | mem_max = n 196 | 197 | finally: 198 | logger.debug('stopping worker service') 199 | # stop whatever may be running (even from a previous batch) 200 | subprocess.run(['systemctl', '--user', 'stop', '--quiet', name]) 201 | if cgroup: 202 | # if we actually got the cgroup (i.e. 
service was started when we looked) 203 | wait_cgroup_disappear(cgroup) 204 | 205 | p = subprocess.run(['systemctl', '--user', 'is-failed', '--quiet', name]) 206 | if p.returncode == 0: 207 | subprocess.run(['systemctl', '--user', 'reset-failed', '--quiet', name]) 208 | return RUsage(nsec / 1_000_000_000, mem_max), timedout 209 | 210 | def wait_cgroup_disappear(cgroup: str) -> None: 211 | d = f'/sys/fs/cgroup/{cgroup}' 212 | if not os.path.exists(d): 213 | return 214 | 215 | while os.path.exists(d): 216 | logger.warning('waiting %s to disappear...', cgroup) 217 | time.sleep(1) 218 | -------------------------------------------------------------------------------- /lilac2/tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | import subprocess 5 | from typing import Dict, Any 6 | import os 7 | import logging 8 | from contextlib import suppress 9 | 10 | import tomllib 11 | 12 | from .const import mydir 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | ansi_escape_re = re.compile(r'\x1B(\[[0-?]*[ -/]*[@-~]|\(B)') 17 | 18 | def kill_child_processes() -> None: 19 | logger.debug('killing child processes (if any)') 20 | subprocess.run(['kill_children']) 21 | 22 | def read_config() -> Dict[str, Any]: 23 | config_file = mydir / 'config.toml' 24 | with open(config_file, 'rb') as f: 25 | return tomllib.load(f) 26 | 27 | def reap_zombies() -> None: 28 | # reap any possible dead children since we are a subreaper 29 | with suppress(ChildProcessError): 30 | while os.waitid(os.P_ALL, 0, os.WEXITED | os.WNOHANG) is not None: 31 | pass 32 | 33 | def get_running_task_cpu_ratio() -> float: 34 | ncpu = os.process_cpu_count() 35 | running = 0 36 | with open('/proc/stat') as f: 37 | for l in f: 38 | if l.startswith('procs_running '): 39 | running = int(l.split()[1]) 40 | break 41 | if ncpu and running: 42 | return running / ncpu 43 | else: 44 | return 0.0 45 | 46 | def get_avail_memory() -> int: 47 | with open('/proc/meminfo') as f: 48 | for l in f: 49 | if l.startswith('MemAvailable:'): 50 | return int(l.split()[1]) * 1024 51 | return 10 * 1024 ** 3 52 | -------------------------------------------------------------------------------- /lilac2/typing.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import types 4 | from typing import ( 5 | Union, Dict, Tuple, Type, NamedTuple, Optional, 6 | Sequence, 7 | ) 8 | from pathlib import Path 9 | import dataclasses 10 | import datetime 11 | 12 | class LilacMod(types.ModuleType): 13 | time_limit_hours: float 14 | pkgbase: str 15 | _G: types.SimpleNamespace 16 | makechrootpkg_args: list[str] 17 | makepkg_args: list[str] 18 | build_args: list[str] 19 | update_on: NvEntries 20 | 21 | NvEntry = dict[str, str] 22 | NvEntries = list[NvEntry] 23 | 24 | @dataclasses.dataclass 25 | class OnBuildEntry: 26 | pkgbase: str 27 | from_pattern: Optional[str] = None 28 | to_pattern: Optional[str] = None 29 | 30 | @dataclasses.dataclass 31 | class LilacInfo: 32 | pkgbase: str 33 | maintainers: list[dict[str, str]] 34 | update_on: NvEntries 35 | update_on_build: list[OnBuildEntry] 36 | throttle_info: dict[int, datetime.timedelta] 37 | repo_depends: list[tuple[str, str]] 38 | repo_makedepends: list[tuple[str, str]] 39 | time_limit_hours: float 40 | staging: bool 41 | managed: bool 42 | 43 | LilacInfos = Dict[str, LilacInfo] 44 | 45 | ExcInfo = Tuple[Type[BaseException], BaseException, types.TracebackType] 46 
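# A purely illustrative sketch of how these records fit together -- the
# package names and values below are hypothetical, not taken from a real
# repository:
#
#   info = LilacInfo(
#     pkgbase='python-foo',
#     maintainers=[{'github': 'someone'}],
#     update_on=[{'source': 'pypi', 'pypi': 'foo'}],
#     update_on_build=[OnBuildEntry(pkgbase='python-bar')],
#     throttle_info={},
#     repo_depends=[('python-bar', 'python-bar')],
#     repo_makedepends=[],
#     time_limit_hours=1.0,
#     staging=False,
#     managed=True,
#   )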
| 47 | Cmd = Sequence[Union[str, Path]] 48 | PathLike = Union[str, Path] 49 | 50 | class Maintainer(NamedTuple): 51 | name: str 52 | email: str 53 | github: Optional[str] 54 | 55 | def __str__(self) -> str: 56 | return f'{self.name} <{self.email}>' 57 | 58 | @classmethod 59 | def from_email_address( 60 | cls, s: str, github: Optional[str] = None, 61 | ) -> Maintainer: 62 | if '<' in s: 63 | name, email = s.split('<', 1) 64 | name = name.strip('" ') 65 | email = email.rstrip('>') 66 | else: 67 | name = s.rsplit('@', 1)[0] 68 | email = s 69 | return cls(name, email, github) 70 | 71 | PkgRel = Union[int, str] 72 | 73 | class PkgVers(NamedTuple): 74 | epoch: Optional[str] 75 | pkgver: str 76 | pkgrel: str 77 | 78 | def __str__(self) -> str: 79 | if self.epoch: 80 | return f'{self.epoch}:{self.pkgver}-{self.pkgrel}' 81 | else: 82 | return f'{self.pkgver}-{self.pkgrel}' 83 | 84 | class RUsage(NamedTuple): 85 | cputime: float 86 | memory: int 87 | 88 | class UsedResource(NamedTuple): 89 | cputime: float 90 | memory: int 91 | elapsed: int 92 | 93 | OnBuildVers = list[tuple[str, str]] 94 | class PkgToBuild(NamedTuple): 95 | pkgbase: str 96 | on_build_vers: OnBuildVers = [] 97 | -------------------------------------------------------------------------------- /lilac2/vendor/__init__.py: -------------------------------------------------------------------------------- 1 | # dummy file to make setuptools recognize lilac2.vendor 2 | -------------------------------------------------------------------------------- /lilac2/vendor/archpkg.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from collections import namedtuple 5 | import subprocess 6 | import re 7 | from typing import List, Dict 8 | 9 | import pyalpm 10 | 11 | class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')): 12 | def __lt__(self, other) -> bool: 13 | if self.name != other.name or self.arch != other.arch: 14 | return NotImplemented 15 | if self.version != other.version: 16 | return pyalpm.vercmp(self.version, other.version) < 0 17 | return float(self.release) < float(other.release) 18 | 19 | def __gt__(self, other) -> bool: 20 | # No, try the other side please. 21 | return NotImplemented 22 | 23 | @property 24 | def fullversion(self) -> str: 25 | return '%s-%s' % (self.version, self.release) 26 | 27 | @classmethod 28 | def parseFilename(cls, filename: str) -> 'PkgNameInfo': 29 | return cls(*trimext(filename, 3).rsplit('-', 3)) 30 | 31 | def trimext(name: str, num: int = 1) -> str: 32 | for i in range(num): 33 | name = os.path.splitext(name)[0] 34 | return name 35 | 36 | def get_pkgname_with_bash(PKGBUILD: str) -> List[str]: 37 | script = '''\ 38 | . 
'%s' 39 | echo ${pkgname[*]}''' % PKGBUILD 40 | # Python 3.4 has 'input' arg for check_output 41 | p = subprocess.Popen( 42 | ['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home', 43 | '--tmpfs', '/run', '--die-with-parent', 44 | '--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'], 45 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, 46 | ) 47 | output = p.communicate(script.encode())[0].decode() 48 | ret = p.wait() 49 | if ret != 0: 50 | raise subprocess.CalledProcessError( 51 | ret, ['bash'], output) 52 | return output.split() 53 | 54 | pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$') 55 | 56 | def _strip_ver(s: str) -> str: 57 | return re.sub(r'[<>=].*', '', s) 58 | 59 | def get_package_info(name: str, local: bool = False) -> Dict[str, str]: 60 | old_lang = os.environ['LANG'] 61 | os.environ['LANG'] = 'C' 62 | args = '-Qi' if local else '-Si' 63 | try: 64 | outb = subprocess.check_output(["pacman", args, name]) 65 | out = outb.decode('latin1') 66 | finally: 67 | os.environ['LANG'] = old_lang 68 | 69 | ret = {} 70 | for l in out.splitlines(): 71 | if not l: 72 | continue 73 | if l[0] not in ' \t': 74 | key, value = l.split(':', 1) 75 | key = key.strip() 76 | value = value.strip() 77 | ret[key] = value 78 | else: 79 | ret[key] += ' ' + l.strip() 80 | return ret 81 | 82 | -------------------------------------------------------------------------------- /lilac2/vendor/github.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import datetime 4 | import weakref 5 | from typing import Any, Iterator, Dict 6 | 7 | from . import requestsutils 8 | from requests import Response 9 | 10 | JsonDict = Dict[str, Any] 11 | 12 | def parse_datetime(s: str) -> datetime.datetime: 13 | dt = datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ') 14 | return dt.replace(tzinfo=datetime.timezone.utc) 15 | 16 | class GitHub(requestsutils.RequestsBase): 17 | baseurl = 'https://api.github.com/' 18 | 19 | def __init__(self, token=None, *, session=None): 20 | if token: 21 | self.token = f'token {token}' 22 | else: 23 | self.token = None 24 | super().__init__(session=session) 25 | 26 | def api_request(self, path, *args, method='get', data=None, **kwargs): 27 | h = kwargs.get('headers', None) 28 | if not h: 29 | h = kwargs['headers'] = {} 30 | h.setdefault('Accept', 'application/vnd.github.v3+json') 31 | if self.token: 32 | h.setdefault('Authorization', self.token) 33 | 34 | if data: 35 | kwargs['json'] = data 36 | if method == 'get': 37 | method = 'post' 38 | 39 | return self.request(path, method=method, *args, **kwargs) 40 | 41 | def get_issue(self, repo: str, issue_nr: int) -> 'Issue': 42 | r = self.api_request( 43 | f'/repos/{repo}/issues/{issue_nr}') 44 | j = r.json() 45 | return Issue(j, self) 46 | 47 | def get_repo_issues(self, repo, *, state='open', labels=''): 48 | params = {'state': state} 49 | if labels: 50 | params['labels'] = labels 51 | r = self.api_request(f'/repos/{repo}/issues', params = params) 52 | 53 | yield from (Issue(x, self) for x in r.json()) 54 | while 'next' in r.links: 55 | r = self.api_request(r.links['next']['url']) 56 | yield from (Issue(x, self) for x in r.json()) 57 | 58 | def get_user_info(self, username: str) -> Any: 59 | r = self.api_request(f'/users/{username}') 60 | return r.json() 61 | 62 | def get_actions_artifacts(self, repo: str) -> Iterator[Any]: 63 | r = self.api_request(f'/repos/{repo}/actions/artifacts') 64 | yield from 
r.json()['artifacts'] 65 | while 'next' in r.links: 66 | r = self.api_request(r.links['next']['url']) 67 | yield from r.json()['artifacts'] 68 | 69 | def add_issue_comment( 70 | self, repo: str, issue_nr: int, comment: str, 71 | ) -> Response: 72 | return self.api_request( 73 | f'/repos/{repo}/issues/{issue_nr}/comments', 74 | data = {'body': comment}, 75 | ) 76 | 77 | class Issue: 78 | def __init__(self, data: JsonDict, gh: GitHub) -> None: 79 | self.gh = weakref.proxy(gh) 80 | self._data = data 81 | self.body = data['body'] 82 | self.number = data['number'] 83 | self.title = data['title'] 84 | self.labels = [x['name'] for x in data['labels']] 85 | self.updated_at = parse_datetime(data['updated_at']) 86 | self._api_url = f"{data['repository_url']}/issues/{data['number']}" 87 | 88 | def comment(self, comment: str) -> Response: 89 | return self.gh.api_request(f'{self._api_url}/comments', data = {'body': comment}) 90 | 91 | def add_labels(self, labels): 92 | if not isinstance(labels, (list, tuple)): 93 | raise TypeError('labels should be a list') 94 | return self.gh.api_request(f'{self._api_url}/labels', data = labels) 95 | 96 | def close(self) -> None: 97 | self.gh.api_request(f'{self._api_url}', method = 'patch', 98 | data = {'state': 'closed'}) 99 | 100 | def __repr__(self) -> str: 101 | return f'<Issue {self.number}: {self.title!r}>' 102 | -------------------------------------------------------------------------------- /lilac2/vendor/htmlutils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | import copy 5 | from html.entities import entitydefs 6 | 7 | from lxml import html 8 | 9 | def _br2span_inplace(el): 10 | for br in el.iterchildren(tag='br'): 11 | sp = html.Element('span') 12 | sp.text = '\n' 13 | sp.tail = br.tail 14 | el.replace(br, sp) 15 | 16 | def extractText(el): 17 | el = copy.copy(el) 18 | _br2span_inplace(el) 19 | return el.text_content() 20 | 21 | def iter_text_and_br(el): 22 | if el.text: 23 | yield el.text 24 | for i in el.iterchildren(): 25 | if i.tag == 'br': 26 | yield '\n' 27 | if i.tail: 28 | yield i.tail 29 | 30 | def un_jsescape(s): 31 | '''%xx & %uxxxx -> char, opposite of Javascript's escape()''' 32 | return re.sub( 33 | r'%u([0-9a-fA-F]{4})|%([0-9a-fA-F]{2})', 34 | lambda m: chr(int(m.group(1) or m.group(2), 16)), 35 | s 36 | ) 37 | 38 | def entityunescape(string): 39 | '''HTML entity decode''' 40 | string = re.sub(r'&#[^;]+;', _sharp2uni, string) 41 | string = re.sub(r'&[^;]+;', lambda m: entitydefs[m.group(0)[1:-1]], string) 42 | return string 43 | 44 | def entityunescape_loose(string): 45 | '''HTML entity decode. 
loose version.''' 46 | string = re.sub(r'&#[0-9a-fA-F]+[;;]?', _sharp2uni, string) 47 | string = re.sub(r'&\w+[;;]?', lambda m: entitydefs[m.group(0)[1:].rstrip(';;')], string) 48 | return string 49 | 50 | def _sharp2uni(m): 51 | '''&#...; ==> unicode''' 52 | s = m.group(0)[2:].rstrip(';;') 53 | if s.startswith('x'): 54 | return chr(int('0'+s, 16)) 55 | else: 56 | return chr(int(s)) 57 | 58 | def parse_document_from_requests(response, session=None, *, encoding=None): 59 | ''' 60 | ``response``: requests ``Response`` object, or URL 61 | ``encoding``: override detected encoding 62 | ''' 63 | if isinstance(response, str): 64 | if session is None: 65 | raise ValueError('URL given but no session') 66 | r = session.get(response) 67 | else: 68 | r = response 69 | if encoding: 70 | r.encoding = encoding 71 | 72 | # fromstring handles bytes well 73 | # https://stackoverflow.com/a/15305248/296473 74 | parser = html.HTMLParser(encoding=encoding or r.encoding) 75 | doc = html.fromstring(r.content, base_url=r.url, parser=parser) 76 | doc.make_links_absolute() 77 | 78 | return doc 79 | 80 | def parse_html_with_encoding(data, encoding='utf-8'): 81 | parser = html.HTMLParser(encoding=encoding) 82 | return html.fromstring(data, parser=parser) 83 | -------------------------------------------------------------------------------- /lilac2/vendor/mailutils.py: -------------------------------------------------------------------------------- 1 | # vim:fileencoding=utf-8 2 | 3 | from __future__ import annotations 4 | 5 | import re 6 | import datetime 7 | import codecs 8 | import smtplib 9 | from email import header 10 | import email.header 11 | from email.header import Header 12 | from email.mime.text import MIMEText 13 | from email.mime.multipart import MIMEMultipart 14 | from email.message import Message 15 | from typing import Union, Iterable, Optional, cast 16 | 17 | addr_re = re.compile(r'(.*?)\s+(<[^>]+>)($|,\s*)') 18 | 19 | def decode_multiline_header(s): 20 | ret = [] 21 | 22 | for b, e in header.decode_header(re.sub(r'\n\s+', ' ', s)): 23 | if e: 24 | if e.lower() == 'gb2312': 25 | e = 'gb18030' 26 | b = b.decode(e) 27 | elif isinstance(b, bytes): 28 | b = b.decode('ascii') 29 | ret.append(b) 30 | 31 | return ''.join(ret) 32 | 33 | def get_datetime(m): 34 | d = m['Date'] 35 | # Wed, 18 Jun 2014 04:09:18 +0000 36 | t = datetime.datetime.strptime(d, '%a, %d %b %Y %H:%M:%S %z') 37 | # convert to local time 38 | return datetime.datetime.fromtimestamp(t.timestamp()) 39 | 40 | def decode_payload(m, *, binary=False): 41 | p = m.get_payload() 42 | enc = m['Content-Transfer-Encoding'] 43 | ctype = m['Content-Type'] 44 | if enc == '8bit': 45 | return p 46 | else: 47 | data = codecs.decode(p.encode(), enc) 48 | if not binary: 49 | charset = get_charset_from_ctype(ctype) or 'utf-8' 50 | data = data.decode(charset) 51 | return data 52 | 53 | def assemble_mail( 54 | subject: str, to: Union[str, Iterable[str]], from_: str, 55 | html: Optional[str] = None, text: Optional[str] = None, 56 | ): 57 | if html is None and text is None: 58 | raise TypeError('no message given') 59 | 60 | html_msg: Optional[MIMEText] = None 61 | text_msg: Optional[MIMEText] = None 62 | 63 | if html: 64 | html_msg = MIMEText(html, 'html', 'utf-8') 65 | if text: 66 | text_msg = MIMEText(text, 'plain', 'utf-8') 67 | 68 | msg: Message 69 | if html_msg and text_msg: 70 | msg = MIMEMultipart('alternative', _subparts = [text_msg, html_msg]) 71 | else: 72 | msg = cast(Message, html_msg or text_msg) 73 | 74 | msg['Subject'] = encode_header(subject) 75 | 
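# encode_header() (defined below) RFC 2047-encodes non-ASCII header values:
# a subject of '构建失败', for instance, would be sent as something like
# '=?utf-8?b?5p6E5bu65aSx6LSl?=', while pure-ASCII subjects pass through
# unchanged.  The address display names below get the same treatment via
# encode_header_address().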
msg['From'] = encode_header_address(from_) 76 | if isinstance(to, str): 77 | msg['To'] = encode_header_address(to) 78 | else: 79 | msg['To'] = ', '.join(encode_header_address(x) for x in to) 80 | 81 | return msg 82 | 83 | def encode_header_address(s): 84 | return addr_re.sub(_addr_submatch, s) 85 | 86 | def encode_header(s): 87 | return Header(s, 'utf-8').encode() if not eight_bit_clean(s) else s 88 | 89 | def decode_header(h): 90 | var = email.header.decode_header(h)[0] 91 | charset = var[1] or 'ascii' 92 | if charset.lower() == 'gb2312': #fxxk 93 | charset = 'gb18030' 94 | try: 95 | var = var[0].decode(charset) 96 | except AttributeError: 97 | var = var[0] 98 | except LookupError: 99 | var = var[0].decode('utf-8', errors='replace') 100 | return var 101 | 102 | def _addr_submatch(m): 103 | return encode_header(m.group(1)) + ' ' + m.group(2) + m.group(3) 104 | 105 | def eight_bit_clean(s): 106 | return all(ord(c) < 128 for c in s) 107 | 108 | def get_charset_from_ctype(ctype): 109 | pos = ctype.find('charset=') 110 | if pos > 0: 111 | charset = ctype[pos+8:] 112 | if charset.lower() == 'gb2312': 113 | # Windows misleadingly uses gb2312 when it's gbk or gb18030 114 | charset = 'gb18030' 115 | elif charset.lower() == 'windows-31j': 116 | # cp932's IANA name (Windows-31J), extended shift_jis 117 | # https://en.wikipedia.org/wiki/Code_page_932 118 | charset = 'cp932' 119 | return charset 120 | 121 | def sendmail(mail): 122 | s = smtplib.SMTP() 123 | s.connect() 124 | s.send_message(mail) 125 | s.quit() 126 | 127 | def save_html_mail(msg): 128 | import os 129 | import tempfile 130 | 131 | basedir = tempfile.mkdtemp() 132 | 133 | def save_file(fname, content): 134 | fname = os.path.join(basedir, fname) 135 | if isinstance(content, str): 136 | f = open(fname, 'w') 137 | else: 138 | f = open(fname, 'wb') 139 | f.write(content) 140 | 141 | def name_gen(): 142 | i = 1 143 | while True: 144 | yield str(i) 145 | i += 1 146 | name_it = name_gen() 147 | 148 | m = msg 149 | title = decode_header(m['Subject']) 150 | mailtype = m.get_content_type() 151 | if mailtype == 'multipart/alternative': 152 | mainMail = [m for m in m.get_payload() 153 | if m.get_content_type() == 'text/html'][0] 154 | mailbody = decode_payload(mainMail) 155 | elif mailtype in ('multipart/related', 'multipart/mixed'): 156 | mails = m.get_payload() 157 | cidMapping = {} 158 | for mail in mails: 159 | if mail.get_content_type() == 'multipart/alternative': 160 | mainMail = [m for m in mail.get_payload() 161 | if m.get_content_type() == 'text/html'][0] 162 | mailbody = decode_payload(mainMail) 163 | elif mail.get_content_type().startswith('text/html'): 164 | mailbody = decode_payload(mail) 165 | else: 166 | try: 167 | cid = mail['Content-ID'][1:-1] 168 | except TypeError: 169 | if mail['Content-Disposition'] and \ 170 | mail['Content-Disposition'].find('attachment') != -1: 171 | continue 172 | raise 173 | fname = decode_header(mail.get_filename() or next(name_it)) 174 | cidMapping[cid] = fname 175 | body = decode_payload(mail, binary=True) 176 | save_file(fname, body) 177 | elif mailtype == 'text/html': 178 | mailbody = decode_payload(m) 179 | else: 180 | raise NotImplementedError('type %s not recognized' % mailtype) 181 | 182 | from lxml.html import fromstring, tostring 183 | from lxml.html import builder as E 184 | 185 | div = fromstring(mailbody) 186 | for cidLink in div.xpath('//*[starts-with(@src, "cid:")]'): 187 | cid = cidLink.get('src')[4:] 188 | cidLink.set('src', cidMapping[cid]) 189 | div.insert(0, E.TITLE(title)) 190 | 
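# Note that both inserts use index 0, so the <meta charset="utf-8"> added
# next ends up before the <title> in the saved document.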
div.insert(0, E.META(charset='utf-8')) 191 | mailbody_b = tostring(div, encoding='utf-8') 192 | save_file('index.html', mailbody_b) 193 | 194 | return os.path.join(basedir, 'index.html') 195 | 196 | -------------------------------------------------------------------------------- /lilac2/vendor/myutils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os, sys 4 | import re 5 | import datetime 6 | import time 7 | from functools import lru_cache, wraps 8 | import logging 9 | import contextlib 10 | import signal 11 | import hashlib 12 | import base64 13 | import fcntl 14 | from typing import Tuple, Union, Optional, Dict, Any, Generator 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | def safe_overwrite(fname: str, data: Union[bytes, str], *, 19 | method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None: 20 | # FIXME: directory has no read perm 21 | # FIXME: symlinks and hard links 22 | tmpname = fname + '.tmp' 23 | # if not using "with", write can fail without exception 24 | with open(tmpname, mode, encoding=encoding) as f: 25 | getattr(f, method)(data) 26 | # see also: https://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/ 27 | f.flush() 28 | os.fsync(f.fileno()) 29 | # if the above write failed (because disk is full etc), the old data should be kept 30 | os.rename(tmpname, fname) 31 | 32 | UNITS = 'KMGTPEZY' 33 | 34 | def filesize(size: int) -> str: 35 | amt, unit = filesize_ex(size) 36 | if unit: 37 | return '%.1f%siB' % (amt, unit) 38 | else: 39 | return '%dB' % amt 40 | 41 | def filesize_ex(size: int) -> Tuple[Union[float, int], str]: 42 | left: Union[int, float] = abs(size) 43 | unit = -1 44 | n = len(UNITS) 45 | while left > 1100 and unit < n: 46 | left = left / 1024 47 | unit += 1 48 | if unit == -1: 49 | return size, '' 50 | else: 51 | if size < 0: 52 | left = -left 53 | return left, UNITS[unit] 54 | 55 | class FileSize(int): 56 | def __str__(self) -> str: 57 | return filesize(self).rstrip('iB') 58 | 59 | def parse_filesize(s: str) -> int: 60 | s1 = s.rstrip('iB') 61 | if not s1: 62 | raise ValueError(s) 63 | 64 | last = s1[-1] 65 | try: 66 | idx = UNITS.index(last) 67 | except ValueError: 68 | return int(float(s1)) 69 | 70 | v = float(s1[:-1]) * 1024 ** (idx+1) 71 | return int(v) 72 | 73 | def humantime(t: int) -> str: 74 | '''seconds -> XhYmZs''' 75 | if t < 0: 76 | sign = '-' 77 | t = -t 78 | else: 79 | sign = '' 80 | 81 | m, s = divmod(t, 60) 82 | h, m = divmod(m, 60) 83 | d, h = divmod(h, 24) 84 | ret = '' 85 | if d: 86 | ret += '%dd' % d 87 | if h: 88 | ret += '%dh' % h 89 | if m: 90 | ret += '%dm' % m 91 | if s: 92 | ret += '%ds' % s 93 | if not ret: 94 | ret = '0s' 95 | return sign + ret 96 | 97 | def dehumantime(s: str) -> int: 98 | '''XhYmZs -> seconds''' 99 | m = re.match(r'(?:(?P\d+)d)?(?:(?P\d+)h)?(?:(?P\d+)m)?(?:(?P\d+)s)?$', s) 100 | if m: 101 | return ( 102 | int(m.group('d') or 0) * 3600 * 24 + 103 | int(m.group('h') or 0) * 3600 + 104 | int(m.group('m') or 0) * 60 + 105 | int(m.group('s') or 0) 106 | ) 107 | else: 108 | raise ValueError(s) 109 | 110 | def _timed_read(file, timeout): 111 | from select import select 112 | if select([file], [], [], timeout)[0]: 113 | return file.read(1) 114 | 115 | def getchar( 116 | prompt: str, 117 | hidden: bool = False, 118 | end: str = '\n', 119 | timeout: Optional[float] = None, 120 | ): 121 | '''读取一个字符''' 122 | import termios 123 | sys.stdout.write(prompt) 124 | sys.stdout.flush() 125 | fd = 
sys.stdin.fileno() 126 | ch: Optional[str] 127 | 128 | def _read() -> Optional[str]: 129 | ch: Optional[str] 130 | if timeout is None: 131 | ch = sys.stdin.read(1) 132 | else: 133 | ch = _timed_read(sys.stdin, timeout) 134 | return ch 135 | 136 | if os.isatty(fd): 137 | old = termios.tcgetattr(fd) 138 | new = termios.tcgetattr(fd) 139 | if hidden: 140 | new[3] = new[3] & ~termios.ICANON & ~termios.ECHO 141 | else: 142 | new[3] = new[3] & ~termios.ICANON 143 | new[6][termios.VMIN] = 1 144 | new[6][termios.VTIME] = 0 145 | try: 146 | termios.tcsetattr(fd, termios.TCSANOW, new) 147 | termios.tcsendbreak(fd, 0) 148 | ch = _read() 149 | finally: 150 | termios.tcsetattr(fd, termios.TCSAFLUSH, old) 151 | else: 152 | ch = _read() 153 | 154 | sys.stdout.write(end) 155 | return ch 156 | 157 | def loadso(fname): 158 | '''ctypes.CDLL 的 wrapper,从 sys.path 中搜索文件''' 159 | from ctypes import CDLL 160 | 161 | for d in sys.path: 162 | p = os.path.join(d, fname) 163 | if os.path.exists(p): 164 | return CDLL(p) 165 | raise ImportError('%s not found' % fname) 166 | 167 | def dofile(path): 168 | G = {} 169 | with open(path) as f: 170 | exec(f.read(), G) 171 | return G 172 | 173 | def restart_if_failed(func, max_tries, args=(), kwargs={}, secs=60, sleep=None): 174 | ''' 175 | re-run when some exception happens, until `max_tries` in `secs` 176 | ''' 177 | import traceback 178 | from collections import deque 179 | 180 | dq = deque(maxlen=max_tries) 181 | while True: 182 | dq.append(time.time()) 183 | try: 184 | return func(*args, **kwargs) 185 | except Exception: 186 | traceback.print_exc() 187 | if len(dq) == max_tries and time.time() - dq[0] < secs: 188 | break 189 | if sleep is not None: 190 | time.sleep(sleep) 191 | else: 192 | break 193 | 194 | def daterange(start, stop=datetime.date.today(), step=datetime.timedelta(days=1)): 195 | d = start 196 | while d < stop: 197 | yield d 198 | d += step 199 | 200 | @lru_cache() 201 | def findfont(fontname): 202 | from subprocess import check_output 203 | out = check_output(['fc-match', '-v', fontname]).decode() 204 | for l in out.split('\n'): 205 | if l.lstrip().startswith('file:'): 206 | return l.split('"', 2)[1] 207 | 208 | def debugfunc(logger=logging, *, _id=[0]): 209 | def w(func): 210 | @wraps(func) 211 | def wrapper(*args, **kwargs): 212 | myid = _id[0] 213 | _id[0] += 1 214 | logger.debug('[func %d] %s(%r, %r)', myid, func.__name__, args, kwargs) 215 | ret = func(*args, **kwargs) 216 | logger.debug('[func %d] return: %r', myid, ret) 217 | return ret 218 | return wrapper 219 | return w 220 | 221 | @contextlib.contextmanager 222 | def execution_timeout(timeout): 223 | def timed_out(signum, sigframe): 224 | raise TimeoutError 225 | 226 | delay, interval = signal.setitimer(signal.ITIMER_REAL, timeout, 0) 227 | old_hdl = signal.signal(signal.SIGALRM, timed_out) 228 | now = time.time() 229 | try: 230 | yield 231 | finally: 232 | # inner timeout must be smaller, or the timer event will be delayed 233 | if delay: 234 | elapsed = time.time() - now 235 | delay = max(delay - elapsed, 0.000001) 236 | else: 237 | delay = 0 238 | signal.setitimer(signal.ITIMER_REAL, delay, interval) 239 | signal.signal(signal.SIGALRM, old_hdl) 240 | 241 | def find_executables(name, path=None): 242 | '''find all matching executables with specific name in path''' 243 | if path is None: 244 | path = os.environ['PATH'].split(os.pathsep) 245 | elif isinstance(path, str): 246 | path = path.split(os.pathsep) 247 | path = [p for p in path if os.path.isdir(p)] 248 | 249 | return [os.path.join(p, f) 
for p in path for f in os.listdir(p) if f == name] 250 | 251 | # The following three are learnt from makepkg 252 | def user_choose(prompt, timeout=None): 253 | # XXX: hard-coded term characters are ok? 254 | prompt = '\x1b[1;34m::\x1b[1;37m %s\x1b[0m ' % prompt 255 | return getchar(prompt, timeout=timeout) 256 | 257 | def msg(msg): 258 | # XXX: hard-coded term characters are ok? 259 | print('\x1b[1;32m==>\x1b[1;37m %s\x1b[0m' % msg) 260 | 261 | def msg2(msg): 262 | # XXX: hard-coded term characters are ok? 263 | print('\x1b[1;34m ->\x1b[1;37m %s\x1b[0m' % msg) 264 | 265 | def is_internal_ip(ip): 266 | import ipaddress 267 | ip = ipaddress.ip_address(ip) 268 | return ip.is_loopback or ip.is_private or ip.is_reserved or ip.is_link_local 269 | 270 | @contextlib.contextmanager 271 | def at_dir(d: os.PathLike) -> Generator[None, None, None]: 272 | old_dir = os.getcwd() 273 | os.chdir(d) 274 | try: 275 | yield 276 | finally: 277 | os.chdir(old_dir) 278 | 279 | def firstExistentPath(paths): 280 | for p in paths: 281 | if os.path.exists(p): 282 | return p 283 | 284 | def md5sum_of_file(file): 285 | with open(file, 'rb') as f: 286 | m = hashlib.md5() 287 | while True: 288 | d = f.read(81920) 289 | if not d: 290 | break 291 | m.update(d) 292 | return m.hexdigest() 293 | 294 | def md5(s, encoding='utf-8'): 295 | m = hashlib.md5() 296 | m.update(s.encode(encoding)) 297 | return m.hexdigest() 298 | 299 | def base64_encode(s): 300 | if isinstance(s, str): 301 | s = s.encode() 302 | return base64.b64encode(s).decode('ascii') 303 | 304 | def lock_file(path: os.PathLike) -> None: 305 | lock = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600) 306 | try: 307 | fcntl.flock(lock, fcntl.LOCK_EX|fcntl.LOCK_NB) 308 | except BlockingIOError: 309 | logger.warning('Waiting for lock to release...') 310 | fcntl.flock(lock, fcntl.LOCK_EX) 311 | 312 | @contextlib.contextmanager 313 | def file_lock(file: os.PathLike) -> Generator[None, None, None]: 314 | lock = os.open(file, os.O_WRONLY | os.O_CREAT, 0o600) 315 | try: 316 | fcntl.flock(lock, fcntl.LOCK_EX) 317 | yield 318 | finally: 319 | os.close(lock) 320 | 321 | def dict_bytes_to_str(d: Dict[Any, Any]) -> Dict[Any, Any]: 322 | ret = {} 323 | for k, v in d.items(): 324 | if isinstance(k, bytes): 325 | with contextlib.suppress(UnicodeDecodeError): 326 | k = k.decode() 327 | 328 | if isinstance(v, bytes): 329 | with contextlib.suppress(UnicodeDecodeError): 330 | v = v.decode() 331 | elif isinstance(v, dict): 332 | v = dict_bytes_to_str(v) 333 | elif isinstance(v, list): 334 | with contextlib.suppress(UnicodeDecodeError): 335 | v = [x.decode() for x in v] 336 | 337 | ret[k] = v 338 | 339 | return ret 340 | 341 | def xsel(input=None): 342 | import subprocess 343 | 344 | if input is None: 345 | return subprocess.getoutput('uniclip') 346 | else: 347 | p = subprocess.Popen(['uniclip', '-i'], stdin=subprocess.PIPE) 348 | p.communicate(input.encode()) 349 | return p.wait() 350 | -------------------------------------------------------------------------------- /lilac2/vendor/nicelogger.py: -------------------------------------------------------------------------------- 1 | ''' 2 | A Tornado-inspired logging formatter, with displayed time with millisecond accuracy 3 | 4 | FYI: pyftpdlib also has a Tornado-style logger. 
5 | ''' 6 | 7 | from __future__ import annotations 8 | 9 | import sys 10 | import time 11 | import logging 12 | 13 | class TornadoLogFormatter(logging.Formatter): 14 | def __init__(self, color, *args, **kwargs): 15 | super().__init__(*args, **kwargs) 16 | self._color = color 17 | if color: 18 | import curses 19 | curses.setupterm() 20 | if sys.hexversion < 0x30203f0: 21 | fg_color = str(curses.tigetstr("setaf") or 22 | curses.tigetstr("setf") or "", "ascii") 23 | else: 24 | fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" 25 | self._colors = { 26 | logging.DEBUG: str(curses.tparm(fg_color, 4), # Blue 27 | "ascii"), 28 | logging.INFO: str(curses.tparm(fg_color, 2), # Green 29 | "ascii"), 30 | logging.WARNING: str(curses.tparm(fg_color, 3), # Yellow 31 | "ascii"), 32 | logging.ERROR: str(curses.tparm(fg_color, 1), # Red 33 | "ascii"), 34 | logging.CRITICAL: str(curses.tparm(fg_color, 9), # Bright Red 35 | "ascii"), 36 | } 37 | self._normal = str(curses.tigetstr("sgr0"), "ascii") 38 | 39 | def format(self, record): 40 | try: 41 | record.message = record.getMessage() 42 | except Exception as e: 43 | record.message = "Bad message (%r): %r" % (e, record.__dict__) 44 | record.asctime = time.strftime( 45 | "%m-%d %H:%M:%S", self.converter(record.created)) 46 | prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \ 47 | record.__dict__ 48 | if self._color: 49 | prefix = (self._colors.get(record.levelno, self._normal) + 50 | prefix + self._normal) 51 | formatted = prefix + " " + record.message 52 | 53 | formatted += ''.join( 54 | ' %s=%s' % (k, v) for k, v in record.__dict__.items() 55 | if k not in { 56 | 'levelname', 'asctime', 'module', 'lineno', 'args', 'message', 57 | 'filename', 'exc_info', 'exc_text', 'created', 'funcName', 58 | 'processName', 'process', 'msecs', 'relativeCreated', 'thread', 59 | 'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info', 60 | 'taskName', 61 | }) 62 | 63 | if record.exc_info: 64 | if not record.exc_text: 65 | record.exc_text = self.formatException(record.exc_info) 66 | if record.exc_text: 67 | formatted = formatted.rstrip() + "\n" + record.exc_text 68 | return formatted.replace("\n", "\n ") 69 | 70 | def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None): 71 | ''' 72 | handler: specify a handler instead of default StreamHandler 73 | color: boolean, force color to be on / off. 
Default to be on only when 74 | ``handler`` isn't specified and the term supports color 75 | ''' 76 | logger = logging.getLogger() 77 | if handler is None: 78 | h = logging.StreamHandler() 79 | else: 80 | h = handler 81 | if color is None: 82 | color = False 83 | if handler is None and sys.stderr.isatty(): 84 | try: 85 | import curses 86 | curses.setupterm() 87 | if curses.tigetnum("colors") > 0: 88 | color = True 89 | except: 90 | import traceback 91 | traceback.print_exc() 92 | formatter = TornadoLogFormatter(color=color) 93 | h.setLevel(level) 94 | h.setFormatter(formatter) 95 | logger.setLevel(level) 96 | logger.addHandler(h) 97 | -------------------------------------------------------------------------------- /lilac2/vendor/requestsutils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from http.cookiejar import MozillaCookieJar 5 | from urllib.parse import urljoin 6 | from typing import Optional, BinaryIO 7 | 8 | import requests 9 | 10 | CHUNK_SIZE = 40960 11 | 12 | def download_into(session: requests.Session, 13 | url: str, file: BinaryIO, process_func=None) -> None: 14 | r = session.get(url, stream=True) 15 | length = int(r.headers.get('Content-Length') or 0) 16 | received = 0 17 | for chunk in r.iter_content(CHUNK_SIZE): 18 | received += len(chunk) 19 | file.write(chunk) 20 | if process_func: 21 | process_func(received, length) 22 | if not length and process_func: 23 | process_func(received, received) 24 | 25 | def download_into_with_progressbar(url, dest): 26 | import time 27 | from functools import partial 28 | from termutils import download_process 29 | 30 | w = os.get_terminal_size()[1] 31 | with open(dest, 'wb') as f: 32 | download_into(requests, url, f, partial( 33 | download_process, dest, time.time(), width=w)) 34 | 35 | class RequestsBase: 36 | _session = None 37 | __our_session: bool = False 38 | userAgent: Optional[str] = None 39 | lasturl: Optional[str] = None 40 | auto_referer: bool = False 41 | baseurl: Optional[str] = None 42 | 43 | @property 44 | def session(self): 45 | if not self._session: 46 | s = requests.Session() 47 | self.__our_session = True 48 | self._session = s 49 | return self._session 50 | 51 | def __init__(self, *, baseurl=None, cookiefile=None, session=None): 52 | if baseurl is not None: 53 | self.baseurl = baseurl 54 | self._session = session 55 | 56 | s = self.session 57 | if cookiefile: 58 | s.cookies = MozillaCookieJar(cookiefile) 59 | if os.path.exists(cookiefile): 60 | s.cookies.load() 61 | 62 | self._has_cookiefile = bool(cookiefile) 63 | self.initialize() 64 | 65 | def initialize(self) -> None: 66 | '''subclasss can override this to change initialization behavior.''' 67 | pass 68 | 69 | def __del__(self): 70 | if self._has_cookiefile: 71 | self.session.cookies.save() 72 | if self.__our_session: 73 | self._session.close() 74 | 75 | def request(self, url: str, method: Optional[str] = None, *args, **kwargs 76 | ) -> requests.Response: 77 | if self.baseurl: 78 | url = urljoin(self.baseurl, url) 79 | 80 | h = kwargs.get('headers', None) 81 | if not h: 82 | h = kwargs['headers'] = {} 83 | if self.userAgent: 84 | h.setdefault('User-Agent', self.userAgent) 85 | if self.auto_referer and self.lasturl: 86 | h.setdefault('Referer', self.lasturl) 87 | 88 | if method is None: 89 | if 'data' in kwargs or 'files' in kwargs or 'json' in kwargs: 90 | method = 'post' 91 | else: 92 | method = 'get' 93 | 94 | response = self.session.request(method, url, *args, 
**kwargs) 95 | # url may have been changed due to redirection 96 | self.lasturl = response.url 97 | return response 98 | 99 | if __name__ == '__main__': 100 | from sys import argv, exit 101 | 102 | if len(argv) != 3: 103 | exit('URL and output file not given.') 104 | 105 | try: 106 | download_into_with_progressbar(argv[1], argv[2]) 107 | except KeyboardInterrupt: 108 | exit(2) 109 | -------------------------------------------------------------------------------- /lilac2/vendor/serializer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import abc 5 | 6 | import pickle 7 | 8 | from .myutils import safe_overwrite 9 | 10 | class Serializer(metaclass=abc.ABCMeta): 11 | def __init__(self, fname, readonly=False, default=None): 12 | ''' 13 | Read the file fname. readonly means data will not be written back on destruction. 14 | If the data is already locked, a SerializerError is raised. 15 | default gives the data to use when the file does not exist or is empty. 16 | 17 | Note: 18 | for data to be written back correctly, this object must still exist at write-back time; alternatively, use a with statement. 19 | Storing the object inside its own data attribute does not work, for unknown reasons. 20 | ''' 21 | self.fname = os.path.abspath(fname) 22 | if readonly: 23 | self.lock = None 24 | else: 25 | dir, file = os.path.split(self.fname) 26 | self.lock = os.path.join(dir, '.%s.lock' % file) 27 | for i in (1,): 28 | # deal with the lock file 29 | if os.path.exists(self.lock): 30 | try: 31 | pid = int(open(self.lock).read()) 32 | except ValueError: 33 | break 34 | 35 | try: 36 | os.kill(pid, 0) 37 | except OSError: 38 | break 39 | else: 40 | self.lock = None 41 | raise SerializerError('data is already locked') 42 | with open(self.lock, 'w') as f: 43 | f.write(str(os.getpid())) 44 | 45 | try: 46 | self.load() 47 | except EOFError: 48 | self.data = default 49 | except IOError as e: 50 | if e.errno == 2 and not readonly: # file does not exist 51 | self.data = default 52 | else: 53 | raise 54 | 55 | def __del__(self): 56 | '''save the data and remove the lock, if needed''' 57 | if self.lock: 58 | self.save() 59 | os.unlink(self.lock) 60 | 61 | def __enter__(self): 62 | return self.data 63 | 64 | def __exit__(self, exc_type, exc_value, traceback): 65 | pass 66 | 67 | @abc.abstractmethod 68 | def load(self): 69 | pass 70 | 71 | @abc.abstractmethod 72 | def save(self): 73 | pass 74 | 75 | class PickledData(Serializer): 76 | def save(self): 77 | data = pickle.dumps(self.data) 78 | safe_overwrite(self.fname, data, mode='wb') 79 | 80 | def load(self): 81 | self.data = pickle.load(open(self.fname, 'rb')) 82 | 83 | class SerializerError(Exception): pass 84 | 85 | if __name__ == '__main__': 86 | # For testing purpose 87 | import tempfile 88 | f = tempfile.mkstemp()[1] 89 | testData = {'sky': 1000, 'kernel': -1000} 90 | try: 91 | with PickledData(f, default=testData) as p: 92 | print(p) 93 | p['space'] = 10000 94 | print(p) 95 | finally: 96 | os.unlink(f) 97 | -------------------------------------------------------------------------------- /lilac2/worker.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import logging 5 | import subprocess 6 | from typing import Optional, List, Generator, Union 7 | from types import SimpleNamespace 8 | import contextlib 9 | import json 10 | import sys 11 | from pathlib import Path 12 | import platform 13 | 14 | import pyalpm 15 | 16 | from .vendor.nicelogger import enable_pretty_logging 17 | from .vendor.myutils import file_lock 18 | 19 | from . 
import pkgbuild 20 | from .typing import LilacMod, LilacInfo, Cmd, OnBuildVers 21 | from .cmd import run_cmd, UNTRUSTED_PREFIX 22 | from .api import ( 23 | vcs_update, get_pkgver_and_pkgrel, update_pkgrel, 24 | _next_pkgrel, 25 | ) 26 | from .nvchecker import NvResults 27 | from .tools import kill_child_processes 28 | from .lilacpy import load_lilac 29 | from .lilacyaml import load_lilacinfo 30 | from .const import _G, PACMAN_DB_DIR, mydir 31 | from .repo import Repo 32 | from . import intl 33 | 34 | logger = logging.getLogger(__name__) 35 | 36 | class SkipBuild(Exception): 37 | def __init__(self, msg: str) -> None: 38 | self.msg = msg 39 | 40 | @contextlib.contextmanager 41 | def may_update_pkgrel() -> Generator[None, None, None]: 42 | pkgver, pkgrel = get_pkgver_and_pkgrel() 43 | yield 44 | 45 | if pkgver is None or pkgrel is None: 46 | return 47 | 48 | pkgver2, pkgrel2 = get_pkgver_and_pkgrel() 49 | if pkgver2 is None or pkgrel2 is None: 50 | return 51 | 52 | if pkgver == pkgver2 and \ 53 | pyalpm.vercmp(f'1-{pkgrel}', f'1-{pkgrel2}') >= 0: 54 | try: 55 | update_pkgrel(_next_pkgrel(pkgrel)) 56 | except ValueError: 57 | # pkgrel is not a number, resetting to 1 58 | update_pkgrel(1) 59 | 60 | def lilac_build( 61 | worker_no: int, 62 | mod: LilacMod, 63 | depend_packages: list[str] = [], 64 | build_prefix: Optional[str] = None, 65 | update_info: NvResults = NvResults(), 66 | on_build_vers: OnBuildVers = [], 67 | bindmounts: list[str] = [], 68 | tmpfs: list[str] = [], 69 | ) -> None: 70 | success = False 71 | _G.built_version = None 72 | 73 | try: 74 | oldver = update_info.oldver 75 | newver = update_info.newver 76 | 77 | if not hasattr(mod, '_G'): 78 | # fill nvchecker result unless already filled (e.g. by hand) 79 | mod._G = SimpleNamespace( 80 | oldver = oldver, 81 | newver = newver, 82 | oldvers = [x.oldver for x in update_info], 83 | newvers = [x.newver for x in update_info], 84 | on_build_vers = [tuple(x) for x in on_build_vers], 85 | ) 86 | 87 | prepare = getattr(mod, 'prepare', None) 88 | if prepare is not None: 89 | msg = prepare() 90 | if isinstance(msg, str): 91 | raise SkipBuild(msg) 92 | 93 | run_cmd(["sh", "-c", "rm -f -- *.pkg.tar.xz *.pkg.tar.xz.sig *.pkg.tar.zst *.pkg.tar.zst.sig"]) 94 | pre_build = getattr(mod, 'pre_build', None) 95 | 96 | with may_update_pkgrel(): 97 | if pre_build is not None: 98 | logger.debug('oldver=%r, newver=%r', oldver, newver) 99 | pre_build() 100 | run_cmd(['recv_gpg_keys']) 101 | vcs_update() 102 | 103 | pkgvers = pkgbuild.check_srcinfo() 104 | _G.built_version = str(pkgvers) 105 | 106 | default_build_prefix = 'extra-%s' % (platform.machine() or 'x86_64') 107 | build_prefix = build_prefix or getattr( 108 | mod, 'build_prefix', default_build_prefix) 109 | if not isinstance(build_prefix, str): 110 | raise TypeError('build_prefix', build_prefix) 111 | 112 | build_args: List[str] = [] 113 | if hasattr(mod, 'build_args'): 114 | build_args = mod.build_args 115 | 116 | makechrootpkg_args = ['-l', f'lilac-{worker_no}'] 117 | if hasattr(mod, 'makechrootpkg_args'): 118 | makechrootpkg_args.extend(mod.makechrootpkg_args) 119 | 120 | makepkg_args = ['--noprogressbar'] 121 | if hasattr(mod, 'makepkg_args'): 122 | makepkg_args.extend(mod.makepkg_args) 123 | 124 | call_build_cmd( 125 | build_prefix, depend_packages, bindmounts, tmpfs, 126 | build_args, makechrootpkg_args, makepkg_args, 127 | ) 128 | 129 | pkgs = [x for x in os.listdir() if x.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))] 130 | if not pkgs: 131 | raise Exception('no package built') 132 | post_build 
= getattr(mod, 'post_build', None) 133 | if post_build is not None: 134 | with file_lock(mydir / 'post_build.lock'): 135 | post_build() 136 | success = True 137 | 138 | finally: 139 | post_build_always = getattr(mod, 'post_build_always', None) 140 | if post_build_always is not None: 141 | post_build_always(success=success) 142 | 143 | def call_build_cmd( 144 | build_prefix: str, depends: List[str], 145 | bindmounts: list[str] = [], 146 | tmpfs: list[str] = [], 147 | build_args: list[str] = [], 148 | makechrootpkg_args: List[str] = [], 149 | makepkg_args: List[str] = [], 150 | ) -> None: 151 | cmd: Cmd 152 | if build_prefix == 'makepkg': 153 | pwd = os.getcwd() 154 | basename = os.path.basename(pwd) 155 | extra_args = ['--share-net', '--bind', pwd, f'/tmp/{basename}', '--chdir', f'/tmp/{basename}'] 156 | cmd = UNTRUSTED_PREFIX + extra_args + ['makepkg', '--holdver'] # type: ignore 157 | else: 158 | gpghome = os.path.expanduser('~/.lilac/gnupg') 159 | cmd = ['env', f'GNUPGHOME={gpghome}', '%s-build' % build_prefix] 160 | cmd.extend(build_args) 161 | cmd.append('--') 162 | 163 | for x in depends: 164 | cmd += ['-I', x] 165 | 166 | for b in bindmounts: 167 | # need to make sure source paths exist 168 | # See --bind in systemd-nspawn(1) for bindmount spec details 169 | # Note that this check does not consider all possible formats 170 | source_dir = b.split(':')[0] 171 | if not os.path.exists(source_dir): 172 | os.makedirs(source_dir) 173 | cmd += ['-d', b] 174 | 175 | for t in tmpfs: 176 | cmd += ['-t', t] 177 | 178 | cmd.extend(makechrootpkg_args) 179 | cmd.extend(['--']) 180 | cmd.extend(makepkg_args) 181 | cmd.extend(['--holdver']) 182 | 183 | # NOTE that Ctrl-C here may not succeed 184 | run_build_cmd(cmd) 185 | 186 | def run_build_cmd(cmd: Cmd) -> None: 187 | logger.info('Running build command: %r', cmd) 188 | 189 | p = subprocess.Popen( 190 | cmd, 191 | stdin = subprocess.DEVNULL, 192 | ) 193 | 194 | while True: 195 | try: 196 | code = p.wait(10) 197 | except subprocess.TimeoutExpired: 198 | st = os.stat(1) 199 | if st.st_size > 1024 ** 3: # larger than 1G 200 | kill_child_processes() 201 | logger.error('\n\nToo much output, killed.') 202 | else: 203 | if code != 0: 204 | raise subprocess.CalledProcessError(code, cmd) 205 | break 206 | 207 | def main() -> None: 208 | enable_pretty_logging('DEBUG') 209 | 210 | from .tools import read_config 211 | config = read_config() 212 | repo = _G.repo = Repo(config) 213 | pkgbuild.load_data(PACMAN_DB_DIR) 214 | 215 | input = json.load(sys.stdin) 216 | logger.debug('got input: %r', input) 217 | 218 | _G.commit_msg_template = input['commit_msg_template'] 219 | 220 | try: 221 | with load_lilac(Path('.')) as mod: 222 | _G.mod = mod 223 | lilac_build( 224 | worker_no = input['worker_no'], 225 | mod = mod, 226 | depend_packages = input['depend_packages'], 227 | update_info = NvResults.from_list(input['update_info']), 228 | on_build_vers = input.get('on_build_vers', []), 229 | bindmounts = input['bindmounts'], 230 | tmpfs = input['tmpfs'], 231 | ) 232 | r = {'status': 'done'} 233 | except SkipBuild as e: 234 | r = { 235 | 'status': 'skipped', 236 | 'msg': e.msg, 237 | } 238 | except Exception as e: 239 | r = { 240 | 'status': 'failed', 241 | 'msg': repr(e), 242 | } 243 | sys.stdout.flush() 244 | try: 245 | handle_failure(e, repo, mod, Path(input['logfile'])) 246 | except UnboundLocalError: 247 | # mod failed to load 248 | info = load_lilacinfo(Path('.')) 249 | handle_failure(e, repo, info, Path(input['logfile'])) 250 | except KeyboardInterrupt: 251 | 
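# A SIGINT here usually means the whole process group received Ctrl-C; it is
# reported as an ordinary failure so that a result file is still written for
# the master process below.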
logger.info('KeyboardInterrupt received') 252 | r = { 253 | 'status': 'failed', 254 | 'msg': 'KeyboardInterrupt', 255 | } 256 | finally: 257 | # say goodbye to all our children 258 | kill_child_processes() 259 | 260 | r['version'] = getattr(_G, 'built_version', None) # type: ignore 261 | 262 | with open(input['result'], 'w') as f: 263 | json.dump(r, f) 264 | 265 | def handle_failure( 266 | e: Exception, repo: Repo, mod: Union[LilacMod, LilacInfo], logfile: Path, 267 | ) -> None: 268 | logger.error('build failed', exc_info=e) 269 | l10n = intl.get_l10n('mail') 270 | 271 | if isinstance(e, pkgbuild.ConflictWithOfficialError): 272 | reason = '' 273 | if e.groups: 274 | reason += l10n.format_value('package-in-official-group', {'groups': repr(e.groups)}) + '\n' 275 | if e.packages: 276 | reason += l10n.format_value('package-replacing-official-package', {'packages': repr(e.packages)}) + '\n' 277 | subj = l10n.format_value('package-conflicts-with-official-repos') 278 | repo.send_error_report( 279 | mod, subject = subj, msg = reason, 280 | ) 281 | 282 | elif isinstance(e, pkgbuild.DowngradingError): 283 | repo.send_error_report( 284 | mod, 285 | subject = l10n.format_value('package-older-subject'), 286 | msg = l10n.format_value('package-older-body', { 287 | 'pkg': e.pkgname, 288 | 'built_version': e.built_version, 289 | 'repo_version': e.repo_version, 290 | }) + '\n', 291 | ) 292 | 293 | else: 294 | repo.send_error_report(mod, exc=e, logfile=logfile) 295 | 296 | if __name__ == '__main__': 297 | main() 298 | -------------------------------------------------------------------------------- /lilaclib.py: -------------------------------------------------------------------------------- 1 | from lilac2.api import * 2 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | warn_unused_configs = True 3 | warn_redundant_casts = True 4 | warn_unused_ignores = True 5 | show_error_context = True 6 | show_column_numbers = True 7 | no_implicit_optional = True 8 | 9 | [mypy-pyalpm] 10 | ignore_missing_imports = True 11 | 12 | [mypy-termutils] 13 | ignore_missing_imports = True 14 | 15 | [mypy-prctl] 16 | ignore_missing_imports = True 17 | 18 | [mypy-tomli_w] 19 | ignore_missing_imports = True 20 | 21 | -------------------------------------------------------------------------------- /nvchecker_source/README.rst: -------------------------------------------------------------------------------- 1 | Version Control System (VCS) (git, hg, svn, bzr) 2 | ------------------------------------------------ 3 | :: 4 | 5 | source = "vcs" 6 | 7 | Check a VCS repo for new commits. The version returned is currently not related to the version of the software and will increase whenever the referred VCS branch changes. This is mainly for Arch Linux. 8 | 9 | vcs 10 | The url of the remote VCS repo, using the same syntax with a VCS url in PKGBUILD (`Pacman`_'s build script). The first VCS url found in the source array of the PKGBUILD will be used if this option is omitted. (Note: for a blank ``vcs`` setting to work correctly, the PKGBUILD has to be in a directory with the name of the software under the path where nvchecker is run. Also, all the commands, if any, needed when sourcing the PKGBUILD need to be installed). 11 | 12 | use_max_tag 13 | Set this to ``true`` to check for the max tag. Currently only supported for ``git``. 
14 | This option returns the biggest tag sorted by ``pkg_resources.parse_version``. 15 | 16 | This source supports `list options`_ when ``use_max_tag`` is set. 17 | 18 | .. _list options: https://github.com/lilydjwg/nvchecker#list-options 19 | 20 | R packages from CRAN and Bioconductor 21 | ------------------------------------- 22 | :: 23 | 24 | source = "rpkgs" 25 | 26 | Check versions from CRAN and Bioconductor. This source is optimized for checking large amounts of packages at once. If you want to check only a few, the ``cran`` source is better for CRAN packages. 27 | 28 | pkgname 29 | Name of the R package. 30 | 31 | repo 32 | The repo of the package. Possible values are ``cran``, ``bioc``, ``bioc-data-annotation``, ``bioc-data-experiment`` and ``bioc-workflows``. 33 | 34 | md5 35 | If set to ``true``, a ``#`` character and the md5sum of the source archive is appended to the version. Defaults to ``false``. 36 | -------------------------------------------------------------------------------- /nvchecker_source/archfiles.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from nvchecker.api import GetVersionError 4 | 5 | PKG_URL = 'https://archlinux.org/packages/%s/files/json/' 6 | 7 | async def get_version(name, conf, *, cache, **kwargs): 8 | key = conf['pkgpart'] 9 | regex = re.compile(conf['filename']) 10 | j = await cache.get_json(PKG_URL % key) 11 | 12 | for f in j['files']: 13 | fn = f.rsplit('/', 1)[-1] 14 | if regex.fullmatch(fn): 15 | return fn 16 | 17 | raise GetVersionError('no file matches specified regex') 18 | -------------------------------------------------------------------------------- /nvchecker_source/rpkgs.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Tuple 2 | from zlib import decompress 3 | 4 | from nvchecker.api import GetVersionError, session 5 | 6 | BIOC_TEMPLATE = 'https://bioconductor.org/packages/release/%s/src/contrib/PACKAGES.gz' 7 | 8 | URL_MAP = { 9 | 'cran': 'https://cran.r-project.org/src/contrib/PACKAGES.gz', 10 | 'bioc': BIOC_TEMPLATE % 'bioc', 11 | 'bioc-data-annotation': BIOC_TEMPLATE % 'data/annotation', 12 | 'bioc-data-experiment': BIOC_TEMPLATE % 'data/experiment', 13 | 'bioc-workflows': BIOC_TEMPLATE % 'workflows', 14 | } 15 | 16 | PKG_FIELD = b'Package: ' 17 | VER_FIELD = b'Version: ' 18 | MD5_FIELD = b'MD5sum: ' 19 | 20 | PKG_FLEN = len(PKG_FIELD) 21 | VER_FLEN = len(VER_FIELD) 22 | MD5_FLEN = len(MD5_FIELD) 23 | 24 | async def get_versions(repo: str) -> Dict[str, Tuple[str, str]]: 25 | url = URL_MAP.get(repo) 26 | if url is None: 27 | raise GetVersionError('Unknown repo', repo = repo) 28 | res = await session.get(url) 29 | data = decompress(res.body, wbits = 31) 30 | 31 | result = {} 32 | for section in data.split(b'\n\n'): 33 | pkg = ver = md5 = None 34 | for line in section.split(b'\n'): 35 | if line.startswith(PKG_FIELD): 36 | pkg = line[PKG_FLEN:].decode('utf8') 37 | elif line.startswith(VER_FIELD): 38 | ver = line[VER_FLEN:].decode('utf8') 39 | elif line.startswith(MD5_FIELD): 40 | md5 = line[MD5_FLEN:].decode('utf8') 41 | if pkg is None or ver is None or md5 is None: 42 | raise GetVersionError('Invalid package data', pkg = pkg, ver = ver, md5 = md5) 43 | result[pkg] = (ver, md5) 44 | 45 | return result 46 | 47 | async def get_version(name, conf, *, cache, **kwargs): 48 | pkgname = conf.get('pkgname', name) 49 | repo = conf['repo'] 50 | versions = await cache.get(repo, get_versions) 51 | data = versions.get(pkgname) 
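# `versions` maps every package of the chosen repo to a (version, md5) pair,
# so a single PACKAGES.gz download (cached per repo by cache.get) serves all
# packages checked in this run.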
52 | if data is None: 53 | raise GetVersionError(f'Package {pkgname} not found in repo {repo}') 54 | add_md5 = conf.get('md5', False) 55 | ver, md5 = data 56 | return f'{ver}#{md5}' if add_md5 else ver 57 | -------------------------------------------------------------------------------- /nvchecker_source/vcs.py: -------------------------------------------------------------------------------- 1 | # MIT licensed 2 | # Copyright (c) 2013-2021 lilydjwg , et al. 3 | 4 | import asyncio 5 | import os.path as _path 6 | 7 | from nvchecker.api import GetVersionError 8 | 9 | _self_path = _path.dirname(_path.abspath(__file__)) 10 | 11 | def get_cmd_prefix(name): 12 | return [ 13 | 'bwrap', '--unshare-all', '--share-net', 14 | '--die-with-parent', 15 | '--ro-bind', '/', '/', '--tmpfs', '/home', '--tmpfs', '/run', 16 | '--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', 17 | '--ro-bind', _path.join(_self_path, 'vcs.sh'), '/tmp/vcs.sh', 18 | '--ro-bind', name, f'/tmp/{name}', '--chdir', '/tmp', 19 | '/bin/bash', '/tmp/vcs.sh', 20 | ] 21 | 22 | PROT_VER = 1 23 | 24 | def _parse_oldver(oldver): 25 | if oldver is None: 26 | return PROT_VER, 0, '' 27 | try: 28 | prot_ver, count, ver = oldver.split('.', maxsplit=2) 29 | prot_ver = int(prot_ver) 30 | count = int(count) 31 | except Exception: 32 | return PROT_VER, 0, '' 33 | if prot_ver != PROT_VER: 34 | return PROT_VER, 0, ver 35 | return PROT_VER, count, ver 36 | 37 | async def get_version(name, conf, *, cache, **kwargs): 38 | vcs = conf.get('vcs', '') 39 | use_max_tag = conf.get('use_max_tag', False) 40 | oldver = conf.get('oldver') 41 | dirname = name.split(':', 1)[0] 42 | cmd = get_cmd_prefix(dirname) + [dirname, vcs] 43 | if use_max_tag: 44 | cmd += ["get_tags"] 45 | 46 | output = await cache.get(tuple(cmd), run_cmd) 47 | 48 | if use_max_tag: 49 | return [tag for tag in output.split("\n")] 50 | else: 51 | oldvers = _parse_oldver(oldver) 52 | if output == oldvers[2]: 53 | return oldver 54 | else: 55 | return "%d.%d.%s" % (oldvers[0], oldvers[1] + 1, output) 56 | 57 | async def run_cmd(cmd): 58 | p = await asyncio.create_subprocess_exec( 59 | *cmd, 60 | stdout=asyncio.subprocess.PIPE, 61 | stderr=asyncio.subprocess.PIPE, 62 | ) 63 | 64 | output, error = await asyncio.wait_for(p.communicate(), 20) 65 | output = output.strip().decode('latin1') 66 | error = error.strip().decode('latin1') 67 | 68 | if p.returncode != 0: 69 | raise GetVersionError( 70 | 'command exited with error', output=output, 71 | returncode=p.returncode, error=error) 72 | else: 73 | return output 74 | -------------------------------------------------------------------------------- /nvchecker_source/vcs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | exec 3>&1 4 | exec >&2 5 | 6 | dir=$1 7 | vcs=$2 8 | get_tags=$3 9 | 10 | parse_vcs_url() { 11 | local _url=$1 12 | local _out_var=$2 13 | # remove folder:: 14 | [[ $_url =~ ^[^/:]*::(.*)$ ]] && _url=${BASH_REMATCH[1]} 15 | [[ $_url =~ ^(bzr|git|hg|svn)([+:])(.*) ]] || return 1 16 | local _proto=${BASH_REMATCH[1]} 17 | [[ ${BASH_REMATCH[2]} = + ]] && _url=${BASH_REMATCH[3]} 18 | local _real_url=${_url%\#*} 19 | local _frag='' 20 | [[ $_real_url = $_url ]] || _frag=${_url##*\#} 21 | eval "${_out_var}"'=("${_proto}" "${_real_url}" "${_frag}")' 22 | } 23 | 24 | get_vcs() { 25 | local _vcs=$1 26 | local _out_var=$2 27 | if [[ -z $_vcs ]]; then 28 | _vcs=$(. 
"${dir}"/PKGBUILD &> /dev/null 29 | for src in "${source[@]}"; do 30 | parse_vcs_url "$src" _ && { 31 | echo "$src" 32 | exit 0 33 | } 34 | done 35 | exit 1) || return 1 36 | fi 37 | parse_vcs_url "$_vcs" "$_out_var" 38 | } 39 | 40 | git_get_version() { 41 | local _url=$1 42 | local _frag=$2 43 | local _ref='' 44 | if [[ -z $_frag ]]; then 45 | _ref=HEAD 46 | elif [[ $_frag =~ ^commit=(.*)$ ]]; then 47 | echo "${BASH_REMATCH[1]}" 48 | return 0 49 | elif [[ $_frag =~ ^branch=(.*)$ ]]; then 50 | _ref=refs/heads/${BASH_REMATCH[1]} 51 | elif [[ $_frag =~ ^tag=(.*)$ ]]; then 52 | _ref=refs/tags/${BASH_REMATCH[1]} 53 | else 54 | return 1 55 | fi 56 | local _res=$(timeout 60s git ls-remote "$_url" "$_ref") 57 | [[ $_res =~ ^([a-fA-F0-9]*)[[:blank:]] ]] || return 1 58 | echo "${BASH_REMATCH[1]}" 59 | } 60 | 61 | hg_get_version() { 62 | local _url=$1 63 | local _frag=$2 64 | local _ref 65 | if [[ -z $_frag ]]; then 66 | _ref=default 67 | elif [[ $_frag =~ ^(revision|tag|branch)=(.*)$ ]]; then 68 | _ref=${BASH_REMATCH[2]} 69 | else 70 | return 1 71 | fi 72 | hg identify "${_url}#${_ref}" 73 | } 74 | 75 | svn_get_version() { 76 | local _url=$1 77 | local _frag=$2 78 | local _extra_arg=() 79 | if [[ -z $_frag ]]; then 80 | true 81 | elif [[ $_frag =~ ^(revision)=(.*)$ ]]; then 82 | _extra_arg=(-r "${BASH_REMATCH[2]}") 83 | else 84 | return 1 85 | fi 86 | # Get rid of locale 87 | env -i PATH="${PATH}" svn info "${_extra_arg[@]}" "${_url}" | \ 88 | sed -n 's/^Revision:[[:blank:]]*\([0-9]*\)/\1/p' 89 | } 90 | 91 | bzr_get_version() { 92 | local _url=$1 93 | local _frag=$2 94 | local _extra_arg=() 95 | if [[ -z $_frag ]]; then 96 | true 97 | elif [[ $_frag =~ ^(revision)=(.*)$ ]]; then 98 | _extra_arg=(-r "${BASH_REMATCH[2]}") 99 | else 100 | return 1 101 | fi 102 | bzr revno -q "${_extra_arg[@]}" "${_url}" 103 | } 104 | 105 | git_get_tags() { 106 | local _url=$1 107 | git ls-remote "$_url" | grep -oP '(?<=refs/tags/)[^^]*$' 108 | } 109 | 110 | get_vcs "${vcs}" components || exit 1 111 | if [[ "x$get_tags" == "xget_tags" ]]; then 112 | eval "${components[0]}_get_tags"' ${components[@]:1}' >&3 113 | else 114 | eval "${components[0]}_get_version"' ${components[@]:1}' >&3 115 | fi 116 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | asyncio_default_fixture_loop_scope = session 3 | -------------------------------------------------------------------------------- /recv_gpg_keys: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | if [[ -z $SANDBOXED ]]; then 4 | [[ -d /run/user/$UID/gnupg ]] && bind_gnupg=("--ro-bind" "/run/user/$UID/gnupg" "/run/user/$UID/gnupg") 5 | exec bwrap --unshare-all --ro-bind / / --tmpfs /home --tmpfs /tmp \ 6 | --tmpfs /run "${bind_gnupg[@]}" \ 7 | --proc /proc --dev /dev --die-with-parent \ 8 | --ro-bind "$0" /tmp/recv_gpg_keys \ 9 | --bind ~/.lilac/gnupg "$HOME/.gnupg" \ 10 | --ro-bind PKGBUILD /tmp/PKGBUILD --chdir /tmp --setenv SANDBOXED 1 \ 11 | /tmp/recv_gpg_keys "$@" 12 | fi 13 | 14 | . /usr/share/makepkg/util.sh 15 | . ./PKGBUILD 16 | for key in "${validpgpkeys[@]}"; do 17 | echo "Receiving key ${key}..." 
18 |   # try both servers, as some keys exist on one server but not the other;
19 |   # we also always try to receive keys, so as to pick up any updates
20 |   timeout -v 60 gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys "$key" || true
21 |   timeout -v 60 gpg --keyserver hkps://keys.openpgp.org --recv-keys "$key" || true
22 | done
23 | 
--------------------------------------------------------------------------------
/schema-docs/Makefile:
--------------------------------------------------------------------------------
 1 | .PHONY: all clean
 2 | 
 3 | SHELL := /bin/bash
 4 | 
 5 | all: build/.lilacyaml-doc
 6 | 
 7 | build/.lilacyaml-doc: build/lilac.json build/docs/.timestamp
 8 | 	bootprint json-schema $< build/docs
 9 | 	rm build/docs/*.map
10 | 	sed -i '/<title>/s/Bootprint/lilac.yaml/' build/docs/index.html
11 | 	(cd build/docs && if [[ -n $$(git status -s) ]]; then git add -A && git commit -m 'update docs'; fi)
12 | 	touch $@
13 | 
14 | build/lilac.json: lilac-yaml-schema.yaml build/.timestamp
15 | 	./yaml2json < $< > $@
16 | 
17 | build/.timestamp:
18 | 	mkdir build
19 | 	touch $@
20 | 
21 | build/docs/.timestamp: build/.timestamp
22 | 	if [[ ! -d build/docs ]]; then git worktree add build/docs gh-pages; fi
23 | 	touch $@
24 | 
25 | clean:
26 | 	rm -rf build
--------------------------------------------------------------------------------
/schema-docs/lilac-py-fields.md:
--------------------------------------------------------------------------------
 1 | # Notes on the `lilac.py` file
 2 | 
 3 | ## Packaging flow
 4 | * `pre_build`: function run before building.
 5 | * `post_build`: function run after a successful build.
 6 | * `post_build_always`: function run at the very end of a build, whether it succeeded or not. Optional.
 7 | * `prepare`: function run before cleanup. Optional. If it returns a string, the build is skipped. Useful for keeping the previously built package around (so that dependents can still use it) when a build is known to be impossible.
 8 | 
 9 | ## Auxiliary information
10 | * `time_limit_hours`: time limit for `*-build`, in hours. Defaults to 1.
11 | * `build_args`: extra arguments passed to `*-build`. Optional.
12 | * `makechrootpkg_args`: extra arguments passed to `makechrootpkg`. Optional.
13 | * `makepkg_args`: extra arguments passed to `makepkg`. Optional. `--holdver` is always included.
14 | 
15 | ## Provided information
16 | * `_G.oldver`: the old version. May be `None`. When the build is triggered (as a rebuild) by an `update_on` entry other than the first, it may also equal the new version.
17 | * `_G.newver`: the new version. May be `None`.
18 | * `_G.oldvers`: list of old versions for the corresponding `update_on` entries.
19 | * `_G.newvers`: list of new versions for the corresponding `update_on` entries.
20 | * `_G.on_build_vers` (`list[tuple[str, str]]`): list of old and new versions for the corresponding `update_on_build` entries; empty strings when a version cannot be found. This feature requires database support to be enabled.
21 | * information from `lilac.yaml` (e.g. `repo_depends` and `maintainers`, with basic parsing applied)
--------------------------------------------------------------------------------
/schema-docs/lilac-yaml-schema.yaml:
--------------------------------------------------------------------------------
 1 | $schema: http://json-schema.org/draft-07/schema#
 2 | $id: http://example.com/product.schema.json
 3 | title: lilac.yaml
 4 | description: Descriptive configuration data for lilac packaging
 5 | type: object
 6 | properties:
 7 |   build_prefix:
 8 |     description: The prefix of the build command to use, e.g. extra-x86_64, multilib or archlinuxcn-x86_64.
 9 |     type: string
10 |     default: "extra-$(uname -m)"
11 |     enum:
12 |       - extra-x86_64
13 |       - archlinuxcn-x86_64
14 |       - multilib
15 |       - multilib-archlinuxcn
16 |       - extra-aarch64
17 |   pre_build:
18 |     description: Name of the function to be used as the pre_build function.
19 |     type: string
20 |   post_build:
21 |     description: Name of the function to be used as the post_build function.
22 |     type: string
23 |   post_build_always:
24 |     description: Name of the function to be used as the post_build_always function.
25 |     type: string
26 |   pre_build_script:
27 |     description: Body of the pre_build function. lilac2.api functions are available.
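    # e.g. pre_build_script: "aur_pre_build()" (illustrative; the string
    # becomes the function body, so any lilac2.api call can appear here)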
28 |     type: string
29 |   post_build_script:
30 |     description: Body of the post_build function. lilac2.api functions are available.
31 |     type: string
32 |   post_build_always_script:
33 |     description: Body of the post_build_always function. lilac2.api functions are available.
34 |     type: string
35 |   time_limit_hours:
36 |     description: Time limit in hours. The build will be aborted if it doesn't finish in time. Default is one hour.
37 |     type: number
38 |   repo_depends:
39 |     description: Packages in the repo that are direct dependencies of the current package.
40 |     type: array
41 |     items:
42 |       anyOf:
43 |         - type: string
44 |           description: Package (directory) name
45 |         - type: object
46 |           description: Package base (directory) as key and package name as value
47 |           minProperties: 1
48 |           maxProperties: 1
49 |           additionalProperties:
50 |             type: string
51 |   repo_makedepends:
52 |     description: Packages in the repo that are in makedepends or checkdepends of the current package.
53 |     type: array
54 |     items:
55 |       anyOf:
56 |         - type: string
57 |           description: Package (directory) name
58 |         - type: object
59 |           description: Package base (directory) as key and package name as value
60 |           minProperties: 1
61 |           maxProperties: 1
62 |           additionalProperties:
63 |             type: string
64 |   update_on:
65 |     description: Configure how nvchecker should check for updates / rebuilds. The first entry should check for updates and the others for rebuilds.
66 |     type: array
67 |     items:
68 |       anyOf:
69 |         - type: object
70 |           description: nvchecker configuration section
71 |           properties:
72 |             source:
73 |               type: string
74 |               description: nvchecker source name
75 |           required:
76 |             - source
77 |         - type: object
78 |           description: use an alias for the nvchecker configuration section
79 |           properties:
80 |             alias:
81 |               type: string
82 |               description: alias name
83 |           required:
84 |             - alias
85 |     minItems: 1
86 |   update_on_build:
87 |     description: Build this package if the configured packages are built in the same batch. Note that the packages should also be added to "repo_depends".
88 |     type: array
89 |     items:
90 |       type: object
91 |       description: on_build configuration
92 |       properties:
93 |         pkgbase:
94 |           type: string
95 |           description: pkgbase to build on
96 |         from_pattern:
97 |           type: string
98 |           description: a regex to transform pkgbase's version. This requires database support to be enabled for lilac.
99 |         to_pattern:
100 |           type: string
101 |           description: a replacement string to transform pkgbase's version to. This requires database support to be enabled for lilac.
102 |       required:
103 |         - pkgbase
104 |     minItems: 0
105 |   maintainers:
106 |     description: List of maintainers for receiving email notifications
107 |     type: array
108 |     items:
109 |       anyOf:
110 |         - type: object
111 |           description: GitHub username with a public email address
112 |           properties:
113 |             github:
114 |               type: string
115 |           required:
116 |             - github
117 |         - type: object
118 |           description: GitHub username and an email address in the form "Name <user@host>". The GitHub public email does not matter.
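          # e.g. {github: someuser, email: "Some User <someuser@example.org>"}
          # (illustrative values)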
119 |           properties:
120 |             github:
121 |               type: string
122 |             email:
123 |               type: string
124 |           required:
125 |             - github
126 |             - email
127 |     minItems: 0
128 |   staging:
129 |     description: Whether to stage the package in a "staging" subdirectory
130 |     type: boolean
131 |     default: false
132 |   managed:
133 |     description: Whether the package should be built by lilac or not
134 |     type: boolean
135 |     default: true
136 | required:
137 |   - maintainers
--------------------------------------------------------------------------------
/schema-docs/special-files.md:
--------------------------------------------------------------------------------
 1 | # Special files in the packaging directory
 2 | 
 3 | * [`lilac.py`](lilac-py-fields.md): packaging control script
 4 | * `lilac.yaml`: packaging configuration
 5 | * `PKGBUILD`: the build script
 6 | * `package.list`: for split packages whose package names cannot be determined with a regex, this file lists the resulting package names, one per line. It keeps the packages from being cleaned up automatically.
 7 | * `.gitignore`: git ignore file. This file is never cleaned up or overwritten by the built-in AUR downloader.
--------------------------------------------------------------------------------
/schema-docs/yaml2json:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | import json
 4 | import sys
 5 | 
 6 | import yaml
 7 | 
 8 | def main():
 9 |   d = yaml.safe_load(sys.stdin)
10 |   json.dump(d, sys.stdout, ensure_ascii=False, indent=2)
11 |   print()
12 | 
13 | if __name__ == '__main__':
14 |   main()
15 | 
16 | 
--------------------------------------------------------------------------------
/scripts/at-maintainer:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | import sys
 4 | import pathlib
 5 | import re
 6 | 
 7 | from lilac2.lilacyaml import iter_pkgdir, load_lilac_yaml
 8 | 
 9 | REPOPATH = pathlib.Path('/ssddata/src/archgitrepo/archlinuxcn')
10 | 
11 | PkgPattern = re.compile(r'[\w.+-]+')
12 | 
13 | def main():
14 |   infos = {}
15 |   for dir in iter_pkgdir(REPOPATH):
16 |     try:
17 |       infos[dir.name] = load_lilac_yaml(dir)
18 |     except Exception as e:
19 |       print(f'Failed to load lilac.yaml for {dir.name}: {e!r}', file=sys.stderr)
20 | 
21 |   for line in sys.stdin:
22 |     m = PkgPattern.search(line)
23 |     if not m:
24 |       sys.stdout.write(line)
25 |       continue
26 | 
27 |     pkg = m.group(0)
28 |     if pkg not in infos:
29 |       sys.stdout.write(line)
30 |       continue
31 | 
32 |     info = infos[pkg]
33 |     maints = [x['github'] for x in info['maintainers']]
34 |     line = line.rstrip() + ' (%s)\n' % ' '.join(f'@{x}' for x in maints)
35 |     sys.stdout.write(line)
36 | 
37 | if __name__ == '__main__':
38 |   main()
--------------------------------------------------------------------------------
/scripts/build-cleaner:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | import os
 4 | import fcntl
 5 | from sys import argv
 6 | import re
 7 | from contextlib import contextmanager, suppress
 8 | import subprocess
 9 | 
10 | 
11 | # ARCHBUILD='/tmp/workspace/archbuild'
12 | ARCHBUILD='/var/lib/archbuild'
13 | 
14 | def usage():
15 |   print('''
16 | Usage: %s [build_prefix [arch]]
17 | 
18 | Clean up chroot build directories under /var/lib/archbuild/ with the specified
19 | <build_prefix> and <arch>.
20 | 
21 | If <build_prefix> and <arch> are not specified, clean up all copies.
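For example, running it with the arguments "extra x86_64" cleans up working
copies under /var/lib/archbuild/extra-x86_64/, keeping the "root" copy.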
22 | ''' % argv[0])
23 | 
24 | def get_prefixes():
25 |   return sorted({re.sub(r'-[^-]*$', '', s) for s in os.listdir(ARCHBUILD)})
26 | 
27 | def get_arches():
28 |   return sorted({re.sub(r'^.*-', '', s) for s in os.listdir(ARCHBUILD)})
29 | 
30 | @contextmanager
31 | def lock(filename):
32 |   with open(filename, 'w') as lockfile:
33 |     fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
34 |     try:
35 |       yield
36 |     finally:
37 |       os.unlink(filename)
38 | 
39 | _is_btrfs_cache = None
40 | def _is_btrfs_subvolume(path):  # cached globally; all copies are assumed to share one file system
41 |   global _is_btrfs_cache
42 |   if _is_btrfs_cache is None:
43 |     try:
44 |       p = subprocess.run(
45 |         ['btrfs', 'subvolume', 'show', path],
46 |         stdout = subprocess.DEVNULL,
47 |         stderr = subprocess.DEVNULL)
48 |       _is_btrfs_cache = p.returncode == 0
49 |     except FileNotFoundError:
50 |       _is_btrfs_cache = False
51 | 
52 |   return _is_btrfs_cache
53 | 
54 | def drop_chroot(full_path):
55 |   if _is_btrfs_subvolume(full_path):
56 |     try:
57 |       subprocess.check_call(['btrfs', 'subvolume', 'delete', full_path], stdout=subprocess.DEVNULL)
58 |     except subprocess.CalledProcessError: # nested subvolumes
59 |       # rm in coreutils >= 9.2 treats nested subvolumes as being on another file system,
60 |       # so specifying the "--one-file-system" flag would cause rm to return non-zero
61 |       subprocess.check_call(['rm', '-rf', full_path])
62 |   else:
63 |     subprocess.check_call(['rm', '-rf', '--one-file-system', full_path])
64 | 
65 | def main():
66 |   prefixes = arches = None
67 |   with suppress(IndexError):
68 |     prefixes = [argv[1]]
69 |     arches = [argv[2]]
70 | 
71 |   if not prefixes:
72 |     prefixes = get_prefixes()
73 |   if not arches:
74 |     arches = get_arches()
75 | 
76 |   for prefix in prefixes:
77 |     for arch in arches:
78 |       base = ARCHBUILD + '/' + prefix + '-' + arch
79 |       if not os.path.isdir(base):
80 |         continue
81 | 
82 |       print('----In %s-%s----' % (prefix, arch))
83 | 
84 |       copies = [x for x in os.listdir(base)
85 |                 if os.path.isdir(base + '/' + x) and x != 'root'
86 |                ]
87 | 
88 |       for copy in copies:
89 |         full_path = base + '/' + copy
90 |         try:
91 |           with lock(full_path + '.lock'):
92 |             print('\033[1;32mClean up copy: %s...' % copy,
93 |                   end='', flush=True)
94 |             drop_chroot(full_path)
95 |             print('done\033[1;0m')
96 |         except BlockingIOError:
97 |           print('\033[1;31mCopy in use, skipped: \033[1;33m%s\033[1;0m' % copy)
98 |       print()
99 | 
100 | 
101 | if __name__ == '__main__':
102 |   main()
103 | 
--------------------------------------------------------------------------------
/scripts/cleanup-dblck:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | '''cleanup leftover pacman db.lck files.
 4 | 
 5 | Pacman doesn't use flock or similar reliable locks, and it frequently gets
 6 | killed by SIGHUP or Ctrl-C without removing the lock file.
 7 | 
 8 | So we clean up old locks, hoping that pacman doesn't get stuck for a long time.
 9 | Hopefully we're more reliable than pacman's file lock.
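Locks whose mtime is older than EXPIRATION_TIME (120 seconds) are unlinked;
see run_once() below.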
10 | ''' 11 | 12 | import glob 13 | import os 14 | import time 15 | 16 | EXPIRATION_TIME = 120 17 | 18 | def run_once(): 19 | t = time.time() 20 | for lock in glob.glob('/var/lib/archbuild/*/*/var/lib/pacman/db.lck'): 21 | try: 22 | m = os.path.getmtime(lock) 23 | except FileNotFoundError: 24 | continue 25 | 26 | if t - m > EXPIRATION_TIME: 27 | print(time.strftime('%Y-%m-%d %H:%M:%S'), 'unlinking stale lock file:', lock) 28 | os.unlink(lock) 29 | 30 | def main(): 31 | while True: 32 | run_once() 33 | time.sleep(60) 34 | 35 | if __name__ == '__main__': 36 | main() 37 | -------------------------------------------------------------------------------- /scripts/cleanup-dblck.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=cleanup stale devtools db.lck files 3 | 4 | [Service] 5 | Type=exec 6 | ExecStart=cleanup-dblck 7 | Restart=on-failure 8 | RestartSec=5s 9 | 10 | [Install] 11 | WantedBy=multi-user.target 12 | -------------------------------------------------------------------------------- /scripts/dbsetup.sql: -------------------------------------------------------------------------------- 1 | create schema if not exists lilac; 2 | set search_path to lilac; 3 | 4 | create type buildresult as enum ('successful', 'failed', 'skipped', 'staged'); 5 | 6 | create table pkglog ( 7 | id serial primary key, 8 | ts timestamp with time zone not null default current_timestamp, 9 | pkgbase text not null, 10 | nv_version text, 11 | pkg_version text, 12 | elapsed int not null, 13 | result buildresult not null, 14 | cputime int, 15 | memory bigint, 16 | msg text, 17 | build_reasons jsonb, 18 | maintainers jsonb 19 | ); 20 | 21 | create index pkglog_ts_idx on pkglog (ts); 22 | create index pkglog_pkgbase_idx on pkglog (pkgbase); 23 | 24 | create type batchevent as enum ('start', 'stop'); 25 | 26 | create table batch ( 27 | id serial primary key, 28 | ts timestamp with time zone not null default current_timestamp, 29 | event batchevent not null, 30 | logdir text 31 | ); 32 | 33 | create index batch_ts_idx on batch (ts); 34 | 35 | create type buildstatus as enum ('pending', 'building', 'done'); 36 | 37 | CREATE OR REPLACE FUNCTION updated_at_trigger() 38 | RETURNS TRIGGER AS $$ 39 | BEGIN 40 | IF row(NEW.*) IS DISTINCT FROM row(OLD.*) THEN 41 | NEW.updated_at = now(); 42 | RETURN NEW; 43 | ELSE 44 | RETURN OLD; 45 | END IF; 46 | RETURN NEW; 47 | END; 48 | $$ language 'plpgsql'; 49 | 50 | create table pkgcurrent ( 51 | id serial primary key, 52 | ts timestamp with time zone not null default current_timestamp, 53 | updated_at timestamp with time zone not null default current_timestamp, 54 | pkgbase text unique not null, 55 | index integer not null, 56 | status buildstatus not null, 57 | build_reasons jsonb not null 58 | ); 59 | 60 | CREATE TRIGGER pkgcurrent_updated BEFORE UPDATE 61 | ON pkgcurrent FOR EACH ROW EXECUTE PROCEDURE updated_at_trigger(); 62 | 63 | -------------------------------------------------------------------------------- /scripts/lilac-cleaner: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import os 4 | import subprocess 5 | from collections import defaultdict 6 | import shutil 7 | from pathlib import Path 8 | import time 9 | 10 | DRY_RUN = True 11 | 12 | def get_git_managed(): 13 | cmd = ['git', 'ls-files'] 14 | out = subprocess.check_output(cmd, text = True) 15 | 16 | pkgbase_to_files = defaultdict(list) 17 | for line in out.splitlines(): 18 | if line == 
'.gitignore':
19 |       continue
20 | 
21 |     try:
22 |       pkgbase, file = line.split('/', 1)
23 |     except ValueError:
24 |       # skip subproject commits
25 |       continue
26 |     pkgbase_to_files[pkgbase].append(file)
27 | 
28 |   return pkgbase_to_files
29 | 
30 | def rmdir(d):
31 |   if DRY_RUN:
32 |     print('Would remove dir ', d)
33 |   else:
34 |     print('Removing dir ', d)
35 |     shutil.rmtree(d, ignore_errors=True)
36 | 
37 | def rmfile(f):
38 |   if DRY_RUN:
39 |     print('Would remove file', f)
40 |   else:
41 |     print('Removing file', f)
42 |     f.unlink()
43 | 
44 | def is_vcs(dir):
45 |   files = [f.name for f in dir.iterdir()]
46 |   return any(x in files for x in ['.git', '.hg', '.svn', 'packed-refs'])
47 | 
48 | def process(dir, git_files):
49 |   files = list(dir.iterdir())
50 |   mtimes = {f: f.stat().st_mtime for f in files}
51 |   vcs_dirs = []
52 | 
53 |   # don't delete files touched near the last update
54 |   try:
55 |     protected_mtime = max(x for x in (
56 |       y for f, y in mtimes.items() if f.name in git_files)
57 |     ) - 86400
58 |   except ValueError: # max() arg is an empty sequence
59 |     protected_mtime = time.time()
60 | 
61 |   for file in files:
62 |     if file.name == '__pycache__':
63 |       continue
64 | 
65 |     if file.name.endswith('.log'):
66 |       # logs are handled by a `find -delete` cron job
67 |       # and kept longer than the source code
68 |       continue
69 | 
70 |     if file.name.endswith((
71 |       '.pkg.tar.zst', '.pkg.tar.zst.sig',
72 |       '.pkg.tar.xz', '.pkg.tar.xz.sig',
73 |     )):
74 |       continue
75 | 
76 |     is_dir = file.is_dir()
77 | 
78 |     if is_dir and is_vcs(file):
79 |       vcs_dirs.append(file)
80 |       continue
81 | 
82 |     if file.name in git_files:
83 |       continue
84 | 
85 |     if mtimes[file] > protected_mtime:
86 |       continue
87 | 
88 |     if is_dir:
89 |       rmdir(file)
90 |     else:
91 |       rmfile(file)
92 | 
93 |   if vcs_dirs:
94 |     vcs_max = max(mtimes[x] for x in vcs_dirs)
95 |     for x in vcs_dirs:
96 |       if vcs_max - mtimes[x] > 86400:
97 |         rmdir(x)
98 | 
99 | def main(repodir, force):
100 |   global DRY_RUN
101 | 
102 |   DRY_RUN = not force
103 | 
104 |   os.chdir(repodir)
105 |   pkgbase_to_files = get_git_managed()
106 | 
107 |   for dir in repodir.iterdir():
108 |     if dir.name == '.gitignore':
109 |       continue
110 | 
111 |     if not dir.is_dir():
112 |       rmfile(dir)
113 |       continue
114 | 
115 |     if not (dir / 'lilac.yaml').exists() and dir.name not in pkgbase_to_files:
116 |       rmdir(dir)
117 |       continue
118 | 
119 |     process(dir, pkgbase_to_files[dir.name])
120 | 
121 | if __name__ == '__main__':
122 |   import argparse
123 | 
124 |   parser = argparse.ArgumentParser(description='clean up our git repository used by lilac')
125 |   parser.add_argument('-f', action='store_true',
126 |                       help='do not dry-run; really delete files')
127 |   parser.add_argument('DIR', nargs='?',
128 |                       help="path to the repository; read lilac's config by default")
129 |   args = parser.parse_args()
130 | 
131 |   if args.DIR:
132 |     repodir = Path(args.DIR)
133 |   else:
134 |     from lilac2.tools import read_config
135 |     config = read_config()
136 |     repodir = Path(config['repository']['repodir']).expanduser()
137 | 
138 |   if args.f:
139 |     try:
140 |       from lilac2.vendor.myutils import lock_file
141 |       from lilac2.const import mydir
142 |     except ImportError:
143 |       pass
144 |     else:
145 |       lock_file(mydir / '.lock')
146 | 
147 |   main(repodir, args.f)
--------------------------------------------------------------------------------
/scripts/pre-commit:
--------------------------------------------------------------------------------
 1 | #!/bin/bash -e
 2 | 
 3 | br=$(git branch | awk '$1 == "*" {print $2}')
 4 | if [[ $br != master ]]; then
 5 |   exit
 6 | fi
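# on master, run the static checkers below; bash -e aborts the commit on failure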
7 | 8 | pyflakes lilac2 lilac 9 | python "$(which mypy)" lilac2 lilac 10 | -------------------------------------------------------------------------------- /scripts/sendmail-test: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from lilac2.tools import read_config 4 | from lilac2.repo import Repo 5 | 6 | def main(): 7 | config = read_config() 8 | r = Repo(config) 9 | r.sendmail(YOUR_ADDRESS_HERE, 'test', 'This is a test mail.') 10 | 11 | if __name__ == '__main__': 12 | main() 13 | 14 | -------------------------------------------------------------------------------- /scripts/sign_and_copy: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | ''' 4 | sign_and_copy missed packages 5 | 6 | this is a problem-fixing script. 7 | ''' 8 | 9 | from pathlib import Path 10 | 11 | from lilac2.lilacyaml import iter_pkgdir 12 | from lilac2.building import sign_and_copy 13 | 14 | REPODIR = Path('~/archgitrepo/archlinuxcn').expanduser() 15 | DESTDIR = Path('~/repo').expanduser() 16 | 17 | def main(): 18 | with open('/data/repo/lastupdate') as f: 19 | lastupdate = int(f.read().strip()) 20 | 21 | for pkgdir in iter_pkgdir(REPODIR): 22 | pkgs = [x for x in pkgdir.iterdir() 23 | if x.name.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))] 24 | if not pkgs: 25 | continue 26 | pkg0 = pkgs[0] 27 | st = pkg0.stat() 28 | if st.st_nlink > 1: 29 | continue 30 | if st.st_mtime >= lastupdate: 31 | print(f'sign_and_copy {pkgdir.name}') 32 | sign_and_copy(pkgdir, DESTDIR) 33 | 34 | if __name__ == '__main__': 35 | main() 36 | -------------------------------------------------------------------------------- /scripts/tailf-build-log: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import select 5 | import os 6 | import datetime 7 | 8 | import psycopg2 9 | import psycopg2.extras 10 | 11 | from lilac2.vendor.myutils import humantime, filesize 12 | 13 | def c(n): 14 | return f'\x1b[38;5;{n}m' 15 | 16 | FMT = { 17 | 'successful': f'[{c(12)}%(ts)s{c(7)}] {c(15)}%(pkgbase)s{c(7)} %(nv_version)s %(action)s{c(7)} as {c(15)}%(pkg_version)s{c(7)} in {c(6)}%(elapsed)s', 18 | 'staged': f'[{c(12)}%(ts)s{c(7)}] {c(15)}%(pkgbase)s{c(7)} %(nv_version)s %(action)s{c(7)} as {c(15)}%(pkg_version)s{c(7)} in {c(6)}%(elapsed)s', 19 | 'failed': f'[{c(12)}%(ts)s{c(7)}] {c(15)}%(pkgbase)s{c(7)} %(nv_version)s %(action)s{c(7)} to build as {c(15)}%(pkg_version)s{c(7)} in {c(6)}%(elapsed)s', 20 | 'skipped': f'[{c(12)}%(ts)s{c(7)}] {c(15)}%(pkgbase)s{c(7)} %(nv_version)s %(action)s{c(7)} because {c(15)}%(msg)s', 21 | '_rusage': f'{c(7)}; CPU time: {c(6)}%(cputime)s{c(7)} (%(cpupercent)s%%{c(7)}), Memory: {c(5)}%(memory)s\n', 22 | '_batch': f'[{c(12)}%(ts)s{c(7)}] {c(14)}build %(event)s\n', 23 | } 24 | 25 | ACTION = { 26 | 'successful': f'{c(10)}built', 27 | 'staged': f'{c(12)}staged', 28 | 'failed': f'{c(9)}failed', 29 | 'skipped': f'{c(3)}skipped', 30 | } 31 | 32 | N_CORES = os.cpu_count() 33 | 34 | def color_gradient(v): 35 | r = 255 - v * 255 36 | g = v * 510 37 | b = v * 255 38 | if g > 255: 39 | g = 510 - g 40 | r = round(r) 41 | g = round(g) 42 | b = round(b) 43 | return f"\x1b[38;2;{r};{g};{b}m" 44 | 45 | def parse_hex_color(c): 46 | return tuple(int(x, 16) for x in (c[1:3], c[3:5], c[5:7])) 47 | 48 | def pretty_print(log): 49 | if 'event' in log: 50 | fmt = FMT['_batch'] 51 | args = { 52 | 'ts': log['ts'].strftime('%Y-%m-%d %H:%M:%S'), 53 | 'event': 
log['event'], 54 | } 55 | out = c(7) + fmt % args 56 | sys.stdout.write(out) 57 | return 58 | 59 | result = log['result'] 60 | cputime = log['cputime'] 61 | memory = log['memory'] 62 | if cputime is None: 63 | cputime = 0 64 | memory = 0 65 | 66 | if log['elapsed']: 67 | cpupercent = round(100 * cputime / log['elapsed']) 68 | else: 69 | cpupercent = 0 70 | cpupercent = color_gradient(1 - cpupercent / 100 / N_CORES) + str(cpupercent) 71 | 72 | args = { 73 | 'ts': log['ts'].strftime('%Y-%m-%d %H:%M:%S'), 74 | 'pkgbase': log['pkgbase'], 75 | 'nv_version': log['nv_version'], 76 | 'action': ACTION.get(result), 77 | 'pkg_version': log['pkg_version'], 78 | 'elapsed': humantime(log['elapsed']), 79 | 'msg': log['msg'], 80 | 'cputime': humantime(cputime), 81 | 'cpupercent': cpupercent, 82 | 'memory': filesize(memory), 83 | } 84 | 85 | fmt = FMT[result] 86 | out = c(7) + fmt % args + FMT['_rusage'] % args 87 | if result == 'failed': 88 | out += f'{c(8)}{log["msg"][:1000]}\n' 89 | sys.stdout.write(out) 90 | 91 | def iter_pkglog(): 92 | conn = psycopg2.connect('') 93 | with conn: 94 | cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) 95 | cursor.execute('select * from lilac.pkglog order by ts desc limit 40') 96 | logs = list(cursor) 97 | 98 | cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) 99 | cursor.execute('select * from lilac.batch where ts > %s', (logs[-1]['ts'],)) 100 | logs.extend(cursor) 101 | logs.sort(key=lambda x: x['ts']) 102 | 103 | cursor = conn.cursor() 104 | cursor.execute('listen build_updated') 105 | 106 | for log in logs: 107 | log['ts'] = log['ts'].astimezone() 108 | yield from logs 109 | 110 | last_ts = datetime.datetime.now().astimezone() 111 | poll = select.poll() 112 | poll.register(conn, select.POLLIN) 113 | 114 | while True: 115 | poll.poll() 116 | conn.poll() 117 | 118 | with conn: 119 | cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) 120 | cursor.execute('select * from lilac.pkglog where ts > %s', (last_ts,)) 121 | logs = list(cursor) 122 | 123 | cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) 124 | cursor.execute('select * from lilac.batch where ts > %s', (last_ts,)) 125 | logs.extend(cursor) 126 | 127 | if logs: 128 | logs.sort(key=lambda x: x['ts']) 129 | last_ts = logs[-1]['ts'] 130 | 131 | for log in logs: 132 | log['ts'] = log['ts'].astimezone() 133 | yield from logs 134 | 135 | def main(): 136 | for log in iter_pkglog(): 137 | try: 138 | pretty_print(log) 139 | except Exception: 140 | print(log) 141 | raise 142 | 143 | if __name__ == '__main__': 144 | try: 145 | main() 146 | except KeyboardInterrupt: 147 | sys.exit(130) 148 | -------------------------------------------------------------------------------- /scripts/update-archpkg-to-alpm: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import pathlib 4 | 5 | from ruamel.yaml import round_trip_dump 6 | from ruamel.yaml.util import load_yaml_guess_indent 7 | from pyalpm import Handle 8 | 9 | from lilac2.lilacyaml import iter_pkgdir 10 | 11 | handle = None 12 | dbs = None 13 | 14 | def load_alpm(): 15 | global handle, dbs 16 | 17 | handle = Handle('/', '/var/lib/pacman') 18 | dbs = {repo: handle.register_syncdb(repo, 0) 19 | for repo in ['core', 'extra', 'community', 'multilib']} 20 | 21 | def get_repo(pkg): 22 | for repo, db in dbs.items(): 23 | if db.get_pkg(pkg): 24 | return repo 25 | 26 | def convert_entry(conf): 27 | conf['source'] = 'alpm' 28 | conf['alpm'] = conf.pop('archpkg') 29 | 
conf['repo'] = get_repo(conf['alpm']) 30 | return conf 31 | 32 | def update_lilacyaml(pkgdir: pathlib.Path) -> None: 33 | # use ruamel.yaml for yaml manipulation with preserving indents and comments 34 | lilac_yaml_path = pkgdir / 'lilac.yaml' 35 | 36 | with open(lilac_yaml_path) as f: 37 | lilac_yaml, indent, block_seq_indent = load_yaml_guess_indent(f.read()) 38 | 39 | update_on = lilac_yaml.get('update_on', []) 40 | if not update_on: 41 | return 42 | 43 | changed = False 44 | for idx, entry in enumerate(update_on): 45 | if 'alias' in entry: 46 | continue 47 | 48 | if entry.get('source') != 'archpkg': 49 | continue 50 | 51 | changed = True 52 | update_on[idx] = convert_entry(entry) 53 | 54 | if changed: 55 | with open(lilac_yaml_path, 'w') as f: 56 | round_trip_dump(lilac_yaml, stream=f, indent=indent, 57 | block_seq_indent=block_seq_indent) 58 | 59 | def main(): 60 | repodir = pathlib.Path('/ldata/src/archgitrepo/archlinuxcn') 61 | load_alpm() 62 | for dir in iter_pkgdir(repodir): 63 | update_lilacyaml(dir) 64 | 65 | if __name__ == '__main__': 66 | main() 67 | -------------------------------------------------------------------------------- /scripts/useful.sql: -------------------------------------------------------------------------------- 1 | -- some useful SQL commands (for PostgreSQL) 2 | 3 | -- show build log 4 | select id, ts, pkgbase, nv_version, pkg_version, elapsed, result, cputime, case when elapsed = 0 then 0 else cputime * 100 / elapsed end as "cpu%", round(memory / 1073741824.0, 3) as "memory (GiB)", substring(msg for 20) as msg, build_reasons, (select array_agg(github) from jsonb_to_recordset(maintainers) as m(github text)) as maintainers from pkglog order by id desc limit 10; 5 | 6 | -- show current build status and expected time 7 | select index, c.pkgbase, updated_at, status, elapsed as last_time, c.build_reasons from pkgcurrent as c left join lateral ( 8 | select elapsed from pkglog where pkgbase = c.pkgbase order by ts desc limit 1 9 | ) as log on true order by c.index asc; 10 | 11 | -- authorize a group of people to select 12 | create role pkg; 13 | grant connect on database lilac_db to pkg; 14 | grant usage on schema lilac to pkg; 15 | grant select on all tables in schema lilac to pkg; 16 | 17 | -- create and grant each user 18 | create role newuser login; 19 | grant pkg to newuser; 20 | -------------------------------------------------------------------------------- /scripts/yaourt-G: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import os 5 | import pathlib 6 | 7 | topdir = pathlib.Path(__file__).resolve().parent.parent 8 | sys.path.append(str(topdir)) 9 | sys.path.append(str(topdir / 'vendor')) 10 | 11 | from lilac2.api import download_official_pkgbuild 12 | 13 | def main(): 14 | name = sys.argv[1] 15 | os.mkdir(name) 16 | os.chdir(name) 17 | download_official_pkgbuild(name) 18 | 19 | if __name__ == '__main__': 20 | main() 21 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from setuptools import find_packages, setup 3 | 4 | setup( 5 | name = 'archlinuxcn-lilac', 6 | use_scm_version = True, 7 | description = 'The build bot for archlinuxcn', 8 | author = 'lilydjwg', 9 | author_email = 'lilydjwg@gmail.com', 10 | python_requires = '>=3.12.0', 11 | url = 'https://github.com/archlinuxcn/lilac', 12 | zip_safe = False, 13 | packages = 
find_packages(exclude=('tests',)) + ['nvchecker_source'], 14 | py_modules = ['lilaclib'], 15 | scripts = ['lilac', 'recv_gpg_keys', 'scripts/build-cleaner', 'scripts/lilac-cleaner'], 16 | setup_requires = ['setuptools_scm'], 17 | # See README.md 18 | install_requires = [ 19 | 'requests', 'lxml', 'PyYAML', 'pyalpm', 'structlog', 'python_prctl', 20 | 'fluent.runtime', 21 | ], 22 | include_package_data = True, 23 | package_data = { 24 | 'lilac2': ['aliases.yaml', 'l10n/*/*.ftl'], 25 | }, 26 | classifiers = [ 27 | 'Programming Language :: Python', 28 | 'Programming Language :: Python :: 3', 29 | 'Programming Language :: Python :: 3.12', 30 | 'Programming Language :: Python :: 3.13', 31 | ], 32 | ) 33 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | import sys 3 | 4 | # sys.path does not support `Path`s yet 5 | this_dir = pathlib.Path(__file__).resolve() 6 | sys.path.insert(0, str(this_dir.parents[1])) 7 | sys.path.insert(0, str(this_dir.parents[1] / 'lilac2' / 'vendor')) 8 | -------------------------------------------------------------------------------- /tests/fixtures/mxnet-git-b628fc716d23ae88373c6bd1089409297ccb2a38.diff: -------------------------------------------------------------------------------- 1 | diff --git a/PKGBUILD b/PKGBUILD 2 | index 56d108d..d566882 100644 3 | --- a/PKGBUILD 4 | +++ b/PKGBUILD 5 | @@ -4,8 +4,8 @@ _cudaarch=Common 6 | _pkgname=mxnet 7 | pkgname=('mxnet-git' 'mxnet-mkl-git' 'mxnet-cuda-git' 'mxnet-cuda-mkl-git') 8 | _pkgver=1.5.1 9 | -pkgver=1.5.1.r10454.3d38dbde74 10 | -pkgrel=2 11 | +pkgver=1.5.1.r10461.18c9a69e2f 12 | +pkgrel=1 13 | pkgdesc="A flexible and efficient library for deep learning" 14 | arch=('x86_64') 15 | url="http://mxnet.io/" 16 | -------------------------------------------------------------------------------- /tests/fixtures/mxnet-git-c80336319e1a3e60178d815a48690e90d2a0c889.diff: -------------------------------------------------------------------------------- 1 | diff --git a/PKGBUILD b/PKGBUILD 2 | index 931d0b4..d2c0775 100644 3 | --- a/PKGBUILD 4 | +++ b/PKGBUILD 5 | @@ -5,7 +5,7 @@ _cudaarch="6.0;6.1;7.0;7.5" 6 | _pkgname=mxnet 7 | pkgname=('mxnet-git' 'mxnet-cuda-git') 8 | _pkgver=2.0.0 9 | -pkgver=2.0.0.r10966.68cb9555c4 10 | +pkgver=2.0.0.r10976.8a5886a677 11 | pkgrel=1 12 | pkgdesc='A flexible and efficient library for deep learning' 13 | arch=('x86_64') 14 | -------------------------------------------------------------------------------- /tests/fixtures/mxnet-git-c88817c10e95f9d9afd7928b973504c4085b4b6c.diff: -------------------------------------------------------------------------------- 1 | diff --git a/PKGBUILD b/PKGBUILD 2 | index 4022c41..a5744ce 100644 3 | --- a/PKGBUILD 4 | +++ b/PKGBUILD 5 | @@ -5,7 +5,7 @@ _cudaarch="6.0;6.1;7.0;7.5" 6 | _pkgname=mxnet 7 | pkgname=('mxnet-git' 'mxnet-cuda-git') 8 | _pkgver=2.0.0 9 | -pkgver=2.0.0.r10875.0bff90dcfe 10 | +pkgver=2.0.0.r10900.5542d03695 11 | pkgrel=1 12 | pkgdesc='A flexible and efficient library for deep learning' 13 | arch=('x86_64') 14 | @@ -137,6 +137,7 @@ _package() { 15 | 16 | # create neccesarry soft links 17 | ln -sf '/usr/lib/libmxnet.so' "${pkgdir}/usr/lib/python$(get_pyver)/site-packages/mxnet/libmxnet.so" 18 | + ln -s "/usr/include" "${pkgdir}/usr/lib/python$(get_pyver)/site-packages/mxnet/include" 19 | 20 | # remove unwantted files 21 | rm -rfv "${pkgdir}/usr/mxnet" 22 | 
-------------------------------------------------------------------------------- /tests/fixtures/nodejs-web-ext-e4d4a1c33026d221ebf6570cc0a33c99dc4b1d9d.diff: -------------------------------------------------------------------------------- 1 | diff --git a/PKGBUILD b/PKGBUILD 2 | index daabf2b..ff2c899 100644 3 | --- a/PKGBUILD 4 | +++ b/PKGBUILD 5 | @@ -2,7 +2,7 @@ 6 | 7 | _npmname=web-ext 8 | pkgname=nodejs-$_npmname 9 | -pkgver=4.1.0 10 | +pkgver=4.2.0 11 | pkgrel=1 12 | pkgdesc='A command line tool to help build, run, and test web extensions' 13 | arch=(any) 14 | -------------------------------------------------------------------------------- /tests/fixtures/python-onnxruntime-7447a82a3fac720bbb85ba5cea5d99f7d6920690.diff: -------------------------------------------------------------------------------- 1 | diff --git a/PKGBUILD b/PKGBUILD 2 | index 91aff66..2ce18ae 100644 3 | --- a/PKGBUILD 4 | +++ b/PKGBUILD 5 | @@ -4,7 +4,7 @@ pkgbase=python-onnxruntime 6 | pkgname=(python-onnxruntime python-onnxruntime-cuda) 7 | pkgver=1.2.0 8 | pkgdesc='Cross-platform, high performance scoring engine for ML models' 9 | -pkgrel=4 10 | +pkgrel=5 11 | arch=(x86_64) 12 | url='https://github.com/microsoft/onnxruntime' 13 | license=(MIT) 14 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lilac2.api import ( 4 | _unquote_item, 5 | _add_into_array, 6 | _allow_update_aur_repo, 7 | ) 8 | 9 | @pytest.mark.parametrize('shell_str, python_str', [ 10 | ('"abc"', 'abc'), 11 | ("'abc'", 'abc'), 12 | ]) 13 | def test_unquote_item(shell_str, python_str): 14 | assert _unquote_item(shell_str) == python_str 15 | 16 | @pytest.mark.parametrize('line, extra_elements, line_expected', [ 17 | ("some_array=()", ["ab", "bc"], '''some_array=("ab" "bc")'''), 18 | ("some_array=('ab', 'bc')", ["cd"], '''some_array=("ab" "bc" "cd")'''), 19 | ('''some_array=("ab" "bc")''', ["cd"], '''some_array=("ab" "bc" "cd")'''), 20 | ('''some_array=("ab" 'bc')''', ["cd"], '''some_array=("ab" "bc" "cd")'''), 21 | ('''some_array=("ab"''', ["cd"], '''some_array=("ab" "cd"'''), 22 | # https://github.com/archlinuxcn/lilac/issues/164 23 | ('''some_array=("$foo"''', ["bar"], '''some_array=("$foo" "bar"'''), 24 | ]) 25 | def test_add_into_array(line, extra_elements, line_expected): 26 | assert _add_into_array(line, extra_elements) == line_expected 27 | 28 | # commits are from https://aur.archlinux.org/{pkgname}.git 29 | @pytest.mark.parametrize('pkgname, commit_sha1, expected', [ 30 | ('mxnet-git', 'b628fc716d23ae88373c6bd1089409297ccb2a38', False), 31 | ('mxnet-git', 'c80336319e1a3e60178d815a48690e90d2a0c889', False), 32 | ('mxnet-git', 'c88817c10e95f9d9afd7928b973504c4085b4b6c', True), 33 | ('nodejs-web-ext', 'e4d4a1c33026d221ebf6570cc0a33c99dc4b1d9d', True), 34 | ('python-onnxruntime', '7447a82a3fac720bbb85ba5cea5d99f7d6920690', False), 35 | ]) 36 | def test_allow_update_aur_repo(pkgname, commit_sha1, expected): 37 | with open(f'tests/fixtures/{pkgname}-{commit_sha1}.diff') as f: 38 | diff = f.read() 39 | assert _allow_update_aur_repo(pkgname, diff) == expected 40 | -------------------------------------------------------------------------------- /tests/test_dependency_resolution.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | from pathlib import Path 3 | 4 | from lilac2.packages import DependencyManager, 
get_dependency_map 5 | 6 | def test_dependency_map(): 7 | depman = DependencyManager(Path('.')) 8 | Info = namedtuple('Info', ['repo_depends', 'repo_makedepends']) 9 | lilacinfos = { 10 | 'A': Info(['B'], ['C']), 11 | 'B': Info(['D'], ['C']), 12 | 'C': Info([], ['E']), 13 | 'D': Info([], []), 14 | 'E': Info(['D'], []), 15 | 'F': Info([], ['C', 'D']), 16 | 'G': Info([], ['F']), 17 | } 18 | expected_all = { 19 | 'A': { 'B', 'C', 'D', 'E' }, 20 | 'B': { 'C', 'D', 'E' }, 21 | 'C': { 'D', 'E' }, 22 | 'D': set(), 23 | 'E': { 'D' }, 24 | 'F': { 'C', 'D', 'E' }, 25 | 'G': { 'C', 'D', 'E', 'F' }, 26 | } 27 | expected_build = { 28 | 'A': { 'B', 'C', 'D' }, 29 | 'B': { 'C', 'D' }, 30 | 'C': { 'D', 'E' }, 31 | 'D': set(), 32 | 'E': { 'D' }, 33 | 'F': { 'C', 'D' }, 34 | 'G': { 'F' }, 35 | } 36 | 37 | res_all, res_build = get_dependency_map(depman, lilacinfos) 38 | def parse_map(m): 39 | return { key: { val.pkgdir.name for val in s } for key, s in m.items() } 40 | assert parse_map(res_all) == expected_all 41 | assert parse_map(res_build) == expected_build 42 | -------------------------------------------------------------------------------- /tests/test_lilaclib.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lilaclib import ( 4 | update_pkgrel, 5 | ) 6 | 7 | from lilac2.vendor.myutils import at_dir 8 | 9 | @pytest.mark.parametrize('pkgbuild, expected_pkgbuild, kwargs', [ 10 | ('pkgrel=1', 'pkgrel=2', {}), 11 | ('pkgrel=10', 'pkgrel=11', {}), 12 | ('pkgrel=1.1', 'pkgrel=2', {}), 13 | ('pkgrel="1"', 'pkgrel=2', {}), 14 | ('pkgrel=1', 'pkgrel=3', {'rel': 3}), 15 | ]) 16 | def test_update_pkgrel(tmpdir, pkgbuild, expected_pkgbuild, kwargs): 17 | with at_dir(tmpdir): 18 | with open('PKGBUILD', 'w') as f: 19 | f.write(pkgbuild) 20 | update_pkgrel(**kwargs) 21 | with open('PKGBUILD', 'r') as f: 22 | new_pkgbuild = f.read() 23 | assert new_pkgbuild == expected_pkgbuild 24 | -------------------------------------------------------------------------------- /tests/test_rpkgs.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | import pytest_asyncio 5 | 6 | pytestmark = pytest.mark.asyncio 7 | 8 | from nvchecker import core, __main__ as main 9 | from nvchecker.util import Entries, RichResult, RawResult 10 | 11 | async def run(entries: Entries) -> RichResult: 12 | task_sem = asyncio.Semaphore(20) 13 | result_q: asyncio.Queue[RawResult] = asyncio.Queue() 14 | keymanager = core.KeyManager(None) 15 | 16 | dispatcher = core.setup_httpclient() 17 | entry_waiter = core.EntryWaiter() 18 | futures = dispatcher.dispatch( 19 | entries, task_sem, result_q, 20 | keymanager, entry_waiter, 1, {}, 21 | ) 22 | 23 | oldvers: RichResult = {} 24 | result_coro = core.process_result(oldvers, result_q, entry_waiter) 25 | runner_coro = core.run_tasks(futures) 26 | 27 | vers, _has_failures = await main.run(result_coro, runner_coro) 28 | return vers 29 | 30 | @pytest_asyncio.fixture(scope='session') 31 | async def get_version(): 32 | async def __call__(name, config): 33 | entries = {name: config} 34 | newvers = await run(entries) 35 | if r := newvers.get(name): 36 | return r.version 37 | 38 | return __call__ 39 | 40 | 41 | async def test_cran(get_version): 42 | assert await get_version('xml2', { 43 | 'source': 'rpkgs', 44 | 'pkgname': 'xml2', 45 | 'repo': 'cran', 46 | 'md5': True, 47 | }) == '1.3.8#1864349a22fb93276bd7c5e87ade8287' 48 | 49 | async def test_bioc(get_version): 50 | assert await 
get_version('BiocVersion', { 51 | 'source': 'rpkgs', 52 | 'pkgname': 'BiocVersion', 53 | 'repo': 'bioc', 54 | }) == '3.21.1' 55 | 56 | async def test_bioc_data_annotation(get_version): 57 | assert await get_version('GO.db', { 58 | 'source': 'rpkgs', 59 | 'pkgname': 'GO.db', 60 | 'repo': 'bioc-data-annotation', 61 | }) == '3.21.0' 62 | 63 | async def test_bioc_data_experiment(get_version): 64 | assert await get_version('ALL', { 65 | 'source': 'rpkgs', 66 | 'pkgname': 'ALL', 67 | 'repo': 'bioc-data-experiment', 68 | }) == '1.50.0' 69 | 70 | async def test_bioc_workflows(get_version): 71 | ver = await get_version('liftOver', { 72 | 'source': 'rpkgs', 73 | 'pkgname': 'liftOver', 74 | 'repo': 'bioc-workflows', 75 | 'md5': True, 76 | }) 77 | assert ver.startswith('1.') 78 | assert '#' in ver 79 | --------------------------------------------------------------------------------