├── create-db-and-upload-action ├── Dockerfile ├── action.yml ├── upload.py ├── entrypoint.sh └── sync.py ├── README.md └── .github └── workflows └── build.yml /create-db-and-upload-action/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM archlinux:latest 2 | 3 | RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime 4 | RUN date 5 | RUN pacman -Syu --noconfirm 6 | RUN pacman -S base-devel git python rclone pyalpm --noconfirm && sed -i '/E_ROOT/d' /usr/bin/makepkg 7 | COPY entrypoint.sh /entrypoint.sh 8 | ENTRYPOINT ["/entrypoint.sh"] 9 | -------------------------------------------------------------------------------- /create-db-and-upload-action/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Generate Arch Repo database and upload' 2 | inputs: 3 | RCLONE_CONFIG_CONTENT: 4 | required: true 5 | RCLONE_CONFIG_NAME: 6 | required: false 7 | dest_path: 8 | required: true 9 | repo_name: 10 | required: true 11 | gpg-privatekey: 12 | description: 'Your GPG private key, used to sign repo.' 13 | local_path: 14 | default: './' 15 | runs: 16 | using: 'docker' 17 | image: 'Dockerfile' 18 | 19 | env: 20 | RCLONE_CONFIG_CONTENT: ${{ inputs.RCLONE_CONFIG_CONTENT }} 21 | RCLONE_CONFIG_NAME: ${{inputs.RCLONE_CONFIG_NAME}} 22 | dest_path: ${{ inputs.dest_path }} 23 | repo_name: ${{ inputs.repo_name }} 24 | gpg_key: ${{ inputs.gpg-privatekey }} 25 | local_path: ${{ inputs.local_path }} 26 | -------------------------------------------------------------------------------- /create-db-and-upload-action/upload.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | 4 | REPO_NAME = os.environ["repo_name"] 5 | ROOT_PATH = os.environ["dest_path"] 6 | CONFIG_NAME = os.environ.get("RCLONE_CONFIG_NAME", "") 7 | 8 | if CONFIG_NAME == "": 9 | result = subprocess.run(["rclone", "listremotes"], capture_output=True) 10 | CONFIG_NAME = result.stdout.decode().split("\n")[0] 11 | if not CONFIG_NAME.endswith(":"): 12 | CONFIG_NAME = CONFIG_NAME + ":" 13 | 14 | if ROOT_PATH.startswith("/"): 15 | ROOT_PATH = ROOT_PATH[1:] 16 | 17 | if __name__ == "__main__": 18 | r = subprocess.run( 19 | ["rclone", "copy", "./", f"{CONFIG_NAME}/{ROOT_PATH}", "--copy-links"], 20 | stderr=subprocess.PIPE, 21 | ) 22 | if r.returncode != 0: 23 | print("Failed when copying to remote") 24 | print(r.stderr.decode()) 25 | exit(0) 26 | -------------------------------------------------------------------------------- /create-db-and-upload-action/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | init_path=$PWD 5 | mkdir upload_packages 6 | find $local_path -type f -name "*.tar.zst" -exec cp {} ./upload_packages/ \; 7 | 8 | echo "$RCLONE_CONFIG_NAME" 9 | 10 | if [ ! -f ~/.config/rclone/rclone.conf ]; then 11 | mkdir --parents ~/.config/rclone 12 | echo "$RCLONE_CONFIG_CONTENT" >> ~/.config/rclone/rclone.conf 13 | fi 14 | 15 | if [ ! -z "$gpg_key" ]; then 16 | echo "$gpg_key" | gpg --import 17 | fi 18 | 19 | cd upload_packages || exit 1 20 | 21 | echo "::group::Adding packages to the repo" 22 | 23 | repo-add "./${repo_name:?}.db.tar.gz" ./*.tar.zst 24 | 25 | echo "::endgroup::" 26 | 27 | echo "::group::Removing old packages" 28 | 29 | python3 $init_path/create-db-and-upload-action/sync.py 30 | 31 | echo "::endgroup::" 32 | 33 | rm "./${repo_name:?}.db.tar.gz" 34 | rm "./${repo_name:?}.files.tar.gz" 35 | 36 | echo "::group::Signing packages" 37 | 38 | if [ ! -z "$gpg_key" ]; then 39 | packages=( "*.tar.zst" ) 40 | for name in $packages 41 | do 42 | gpg --detach-sig --yes $name 43 | done 44 | repo-add --verify --sign "./${repo_name:?}.db.tar.gz" ./*.tar.zst 45 | fi 46 | 47 | echo "::endgroup::" 48 | 49 | echo "::group::Uploading to remote" 50 | python3 $init_path/create-db-and-upload-action/upload.py 51 | echo "::endgroup::" -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Use GitHub Actions to build Arch packages. 2 | For more information, please read [my post](https://viflythink.com/Use_GitHubActions_to_build_AUR/) (Chinese). 3 | 4 | The uploadToOneDrive job is optional, you can use [urepo](https://github.com/vifly/urepo) to create your package repositories after uploading to OneDrive. Thanks https://github.com/vifly/arch-build/pull/8 , so you can choose other cloud storage rclone support, but the action input are changed, remember to update your secrets. 5 | 6 | # Usage 7 | The packages are located at OneDrive and GitHub releases, choose one of you like. 8 | 9 | Add the following code snippet to your `/etc/pacman.conf` (choose one): 10 | 11 | ``` 12 | # Download from OneDrive 13 | [vifly] 14 | Server = https://archrepo.viflythink.com 15 | ``` 16 | 17 | ``` 18 | # Download from GitHub releases 19 | [vifly] 20 | Server = https://github.com/vifly/arch-build/releases/latest/download 21 | ``` 22 | 23 | And import my pubkey: 24 | 25 | ```Bash 26 | wget -O /tmp/vifly-repo.key 'https://share.viflythink.com/arch-repo.key' && sudo pacman-key --add /tmp/vifly-repo.key 27 | sudo pacman-key --lsign-key viflythink@gmail.com 28 | ``` 29 | 30 | Then, run `sudo pacman -Syu` to update the repository and upgrade the system. 31 | 32 | Now you can use `sudo pacman -S ` to install packages from my repository. 33 | 34 | # TODO 35 | - [ ] some actions are too coupled, need to refactor 36 | - [ ] add more clear output log for debug 37 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: BUILD 2 | 3 | on: 4 | push: 5 | schedule: 6 | - cron: '1 */8 * * *' 7 | 8 | jobs: 9 | buildAUR: 10 | strategy: 11 | matrix: 12 | repos: [osu-lazer, weechat-notify-send, fpp, redsocks, git-cola, realesrgan-ncnn-vulkan, python-torchaudio, python-torchinfo, python-rapidocr-onnxruntime, ludusavi] 13 | include: 14 | - repos: git-cola 15 | preinstall-pkgs: "at-spi2-core" 16 | - repos: realesrgan-ncnn-vulkan 17 | preinstall-pkgs: "vulkan-intel" 18 | 19 | fail-fast: false 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | 26 | - uses: ./build-aur-action 27 | with: 28 | repo-name: ${{ matrix.repos }} 29 | preinstallPkgs: ${{ matrix.preinstall-pkgs }} 30 | 31 | - uses: actions/upload-artifact@v4 32 | with: 33 | path: ./*/*.pkg.tar.zst 34 | name: ${{ matrix.repos }} 35 | if-no-files-found: error 36 | 37 | - uses: ncipollo/release-action@v1 38 | with: 39 | allowUpdates: true 40 | tag: "packages" 41 | artifacts: "./*/*.zst" 42 | token: ${{ secrets.GITHUB_TOKEN }} 43 | 44 | uploadToOneDrive: 45 | runs-on: ubuntu-latest 46 | if: always() 47 | needs: [buildAUR] 48 | 49 | steps: 50 | - uses: actions/checkout@v4 51 | 52 | - uses: actions/download-artifact@v4 53 | id: download 54 | 55 | - name: Display structure of downloaded files 56 | run: ls -R ${{ steps.download.outputs.download-path }} 57 | 58 | - uses: ./create-db-and-upload-action 59 | with: 60 | RCLONE_CONFIG_CONTENT: ${{ secrets.RCLONE_CONFIG }} 61 | RCLONE_CONFIG_NAME: onedrive 62 | dest_path: /archrepo 63 | repo_name: vifly 64 | gpg-privatekey: ${{ secrets.gpg_private_key }} 65 | local_path: ${{ steps.download.outputs.download-path }} 66 | 67 | - uses: ncipollo/release-action@v1 68 | with: 69 | allowUpdates: true 70 | tag: "packages" 71 | artifacts: "./upload_packages/*.sig,./upload_packages/*.files,./upload_packages/*.db,./upload_packages/*.tar.gz" 72 | token: ${{ secrets.GITHUB_TOKEN }} 73 | -------------------------------------------------------------------------------- /create-db-and-upload-action/sync.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import subprocess 4 | import os 5 | import tarfile 6 | import shutil 7 | import glob 8 | import pyalpm 9 | from typing import NamedTuple 10 | from contextlib import suppress 11 | 12 | REPO_NAME = os.environ["repo_name"] 13 | ROOT_PATH = os.environ["dest_path"] 14 | CONFIG_NAME = os.environ.get("RCLONE_CONFIG_NAME", "") 15 | 16 | if CONFIG_NAME == "": 17 | result = subprocess.run(["rclone", "listremotes"], capture_output=True) 18 | CONFIG_NAME = result.stdout.decode().split("\n")[0] 19 | if not CONFIG_NAME.endswith(":"): 20 | CONFIG_NAME = CONFIG_NAME + ":" 21 | 22 | if ROOT_PATH.startswith("/"): 23 | ROOT_PATH = ROOT_PATH[1:] 24 | 25 | 26 | class PkgInfo(NamedTuple): 27 | filename: str 28 | pkgname: str 29 | version: str 30 | 31 | 32 | def get_pkg_infos(file_path: str) -> list["PkgInfo"]: 33 | """Get packages info from "*.db.tar.gz". 34 | 35 | Args: 36 | file_path (str): DB file path. 37 | 38 | Returns: 39 | list["PkgInfo"]: A list contains all packages info. 40 | """ 41 | with tarfile.open(file_path) as f: 42 | f.extractall("/tmp/extractdb") 43 | 44 | pkg_infos = [] 45 | pkgs = glob.glob("/tmp/extractdb/*/desc") 46 | for pkg_desc in pkgs: 47 | with open(pkg_desc, "r") as f: 48 | lines = f.readlines() 49 | lines = [i.strip() for i in lines] 50 | for index, line in enumerate(lines): 51 | if "%FILENAME%" in line: 52 | filename = lines[index + 1] 53 | if "%NAME%" in line: 54 | pkgname = lines[index + 1] 55 | if "%VERSION%" in line: 56 | version = lines[index + 1] 57 | 58 | pkg_infos.append(PkgInfo(filename, pkgname, version)) 59 | 60 | shutil.rmtree("/tmp/extractdb") 61 | 62 | return pkg_infos 63 | 64 | 65 | def rclone_delete(name: str): 66 | r = subprocess.run( 67 | ["rclone", "delete", f"{CONFIG_NAME}/{ROOT_PATH}/{name}"], 68 | stdout=subprocess.PIPE, 69 | stderr=subprocess.PIPE, 70 | ) 71 | if r.returncode != 0: 72 | raise RuntimeError(r.stderr.decode()) 73 | 74 | 75 | def rclone_download(name: str, dest_path: str = "./"): 76 | r = subprocess.run( 77 | [ 78 | "rclone", 79 | "copy", 80 | f"{CONFIG_NAME}/{ROOT_PATH}/{name}", 81 | dest_path, 82 | ], 83 | stdout=subprocess.PIPE, 84 | stderr=subprocess.PIPE, 85 | ) 86 | if r.returncode != 0: 87 | raise RuntimeError(r.stderr.decode()) 88 | 89 | 90 | def get_old_packages( 91 | local_packages: list["PkgInfo"], remote_packages: list["PkgInfo"] 92 | ) -> list["PkgInfo"]: 93 | old_packages = [] 94 | for l in local_packages: 95 | for r in remote_packages: 96 | if l.pkgname == r.pkgname: 97 | res = pyalpm.vercmp(l.version, r.version) 98 | if res > 0: 99 | old_packages.append(r) 100 | 101 | return old_packages 102 | 103 | 104 | def download_local_miss_files( 105 | local_packages: list["PkgInfo"], 106 | remote_packages: list["PkgInfo"], 107 | old_packages: list["PkgInfo"], 108 | ): 109 | local_files = [i.filename for i in local_packages] 110 | remote_files = [i.filename for i in remote_packages] 111 | old_files = [i.filename for i in old_packages] 112 | remote_new_files = [i for i in remote_files if i not in old_files] 113 | for r in remote_new_files: 114 | if r not in local_files and ".db" not in r and ".files" not in r: 115 | with suppress(RuntimeError): 116 | rclone_download(r) 117 | 118 | 119 | if __name__ == "__main__": 120 | r = subprocess.run( 121 | ["rclone", "size", f"{CONFIG_NAME}/{ROOT_PATH}/{REPO_NAME}.db.tar.gz"], 122 | stderr=subprocess.PIPE, 123 | stdout=subprocess.PIPE, 124 | ) 125 | if r.returncode != 0 or "Total size: 0" in r.stdout.decode(): 126 | print("Remote database file is not exist!") 127 | print( 128 | "If you are running this script for the first time, you can ignore this error." 129 | ) 130 | print(r.stderr.decode()) 131 | exit(0) 132 | 133 | local_packages = get_pkg_infos(f"./{REPO_NAME}.db.tar.gz") 134 | 135 | rclone_download(f"{REPO_NAME}.db.tar.gz", "/tmp/") 136 | remote_packages = get_pkg_infos(f"/tmp/{REPO_NAME}.db.tar.gz") 137 | 138 | old_packages = get_old_packages(local_packages, remote_packages) 139 | for i in old_packages: 140 | print(f"delete {CONFIG_NAME} {i.filename}") 141 | rclone_delete(i.filename) 142 | with suppress(RuntimeError): 143 | rclone_delete(i.filename + ".sig") 144 | 145 | download_local_miss_files(local_packages, remote_packages, old_packages) 146 | --------------------------------------------------------------------------------