├── .gitignore
├── .gitmodules
├── JavaApplicationStub-OSX
├── LICENSE
├── OSX-like-bundle
│   ├── ExampleAppOpener.sh
│   └── README.md
├── README.md
├── abbs-ci
│   └── bash-check.py
├── antic
│   └── main.py
├── aosa-report
│   ├── .gitignore
│   ├── README.md
│   ├── reporter.py
│   └── requirements.txt
├── aosc-build-analyzer.sh
├── aosc-git-wiki.sh
├── aosc-os-dpkg2rpm
├── aosc-sunxi-builder.sh
├── aosc-wiki-dumper.py
├── aoscbootstrap
│   └── README.md
├── apt-spy
├── bootstrap-ldc.sh
├── breaker
├── breakit
│   ├── Cargo.lock
│   ├── Cargo.toml
│   ├── LICENSE
│   ├── rustfmt.toml
│   └── src
│       ├── lib.rs
│       ├── main.rs
│       ├── pkgcontents.rs
│       └── pkgsite.rs
├── build-cross-rustc.bash
├── buildbot-benchmark
│   ├── COPYING
│   └── README.md
├── buildlog
│   ├── LICENSE
│   └── buildlog
├── bump-rel
│   ├── COPYING
│   ├── README.md
│   └── bump-rel
├── chkupdate-anitya-checker.rkt
├── ciel-batch
│   ├── COPYING
│   ├── README.md
│   └── ciel-batch
├── clean-topics
│   ├── aosc-clean-topics.service
│   └── main.py
├── collect-users
│   ├── .gitignore
│   ├── Cargo.lock
│   ├── Cargo.toml
│   └── src
│       └── main.rs
├── commit-o-matic
│   ├── COPYING
│   ├── README.md
│   └── commit-o-matic.sh
├── commitpacs
│   ├── LICENSE
│   └── commitpacs
├── copypac
│   ├── LICENSE
│   └── copypac
├── cp-l
├── cpan2ab.pl
├── deb-tree
│   ├── .gitignore
│   ├── Cargo.lock
│   ├── Cargo.toml
│   └── src
│       └── main.rs
├── debootstrap
│   ├── README.md
│   └── aosc
├── diff-deb.sh
├── distro-preflight-check
│   ├── COPYING
│   ├── README.md
│   ├── check-leaked-files.bash
│   └── revoke-leaked-keys.bash
├── dropit
│   ├── LICENSE
│   └── dropit
├── electron
│   ├── 0001-v8-fix-build-with-gcc7.patch
│   ├── 0002-WebKit-fix-build-with-gcc7-full.patch
│   ├── 0003-freetype-2.8.1-fix.patch
│   ├── 0004-fix-build-on-glibc-2.26.patch
│   ├── 0005-no-bundled-sysroot.patch
│   ├── 0006-gn-bootstrap-fix.patch
│   ├── 0007-libchromiumcontent-settings.patch
│   ├── 0008-chromium-unset-madv_free.patch
│   ├── 0009-chromium-use-system-re2.patch
│   ├── 0010-chromium-use-system-ffmpeg.patch
│   ├── 0011-chromium-gtk3.patch
│   ├── 0012-brightray-use-system-ffmpeg.patch
│   ├── 0013-brightray-no-whole-archive.patch
│   ├── 0014-use-system-libraries-in-node.patch
│   ├── additional_patches_list
│   ├── aosc-electron-builder.sh
│   └── collect-abflags.sh
├── find-deps
│   ├── .gitignore
│   ├── Cargo.lock
│   ├── Cargo.toml
│   └── src
│       └── main.rs
├── findupd
│   ├── README.md
│   ├── findupd
│   ├── rebuild.py
│   └── update-pkgs.py
├── gen-binutils-cross.py
├── genaffect
│   ├── COPYING
│   ├── README.md
│   └── genaffect
├── generate-test-queue
│   ├── COPYING
│   ├── README.md
│   └── generate-test-queue
├── grow-rootfs
├── kernel-template-postinst
├── leaves.sql
├── list-affected
│   ├── COPYING
│   ├── README.md
│   └── list-affected
├── loong64-it
│   ├── COPYING
│   ├── README.md
│   ├── loong64-it.bash
│   └── loongarch64-it.bash
├── loongarch-bootstrap
├── make-jdk-tarball-new.sh
├── make-rebuilds-list
│   ├── .gitignore
│   ├── README.md
│   └── make-rebuilds-list
├── make-video-samples
│   ├── COPYING
│   ├── README.md
│   └── make-video-samples.bash
├── mkchkupdate
├── mkfile.sh
├── mkpkgbreak
│   ├── README.md
│   └── mkpkgbreak
├── mkreleasetorrent.sh
├── mkrfr
│   ├── LICENSE
│   └── mkrfr
├── mksvgspcimen.sh
├── mktransitionpac
│   ├── LICENSE
│   └── mktransitionpac
├── msgtac.sh
├── patch-series-rename
│   ├── COPYING
│   ├── README.md
│   └── patch-series-rename
├── pingus_font.py
├── pjs-fetch-oracle-jdk.js
├── pkg-leaves.rkt
├── pkg-prune.rkt
├── pkg2ab
├── pull-topic-as-local
│   ├── COPYING
│   ├── README.md
│   └── pull-topic-as-local
├── pushpkg
│   ├── COPYING
│   ├── README.md
│   ├── completions
│   │   ├── pushpkg.bash
│   │   └── pushpkg.fish
│   └── pushpkg
├── qt5-repack.sh
├── qtsmerge
├── repo-manifest-helper
│   ├── README.md
│   └── main.py
├── repo-refresh
│   ├── repo-refresh.service
│   ├── repo-refresh.sh
│   └── repo-refresh.timer
├── repository-notifier
│   ├── README.md
│   └── telegram.py
├── speculator
│   ├── COPYING
│   ├── README.md
│   └── speculator
├── spiral
│   ├── .gitignore
│   ├── Cargo.lock
│   ├── Cargo.toml
│   └── src
│       └── main.rs
├── st
├── suicide.sh
├── sz
├── translations
│   ├── .gitignore
│   ├── refresh-tp.py
│   └── requirements.txt
├── univt-fonts
│   ├── bdflib.py
│   ├── convert_univt.py
│   └── extract_univt.py
├── webrtc-repack
│   ├── README.md
│   └── make-repack.sh
├── whatlib.sh
└── zhconv-merge.sh

/.gitignore:
--------------------------------------------------------------------------------
1 | # cargo artifacts
2 | target/
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "pkgsite-tools"]
2 | 	path = pkgsite-tools
3 | 	url = https://github.com/AOSC-Dev/pkgsite-tools
4 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | All files in this repo, unless otherwise declared, are released under The
2 | MIT License.
3 | 
4 | The MIT License (MIT)
5 | =====================
6 | 
7 | Copyright (c) 2015 AOSC-Dev
8 | 
9 | Permission is hereby granted, free of charge, to any person obtaining a copy
10 | of this software and associated documentation files (the "Software"), to deal
11 | in the Software without restriction, including without limitation the rights
12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
13 | copies of the Software, and to permit persons to whom the Software is
14 | furnished to do so, subject to the following conditions:
15 | 
16 | The above copyright notice and this permission notice shall be included in all
17 | copies or substantial portions of the Software.
18 | 
19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
25 | SOFTWARE.
26 | 
--------------------------------------------------------------------------------
/OSX-like-bundle/ExampleAppOpener.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | # An example script which shows the concept of cross-platform .app bundling.
3 | # Copyright (c) 2014 Arthur Wang
4 | 
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU General Public License as published by
7 | # the Free Software Foundation, either version 2 of the License, or
8 | # (at your option) any later version.
9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | 
18 | 
19 | # OS="Linux_x86_64 Linux_i386"
20 | OS="MacOS" # Well of course I can simply copy that OpenRA shell..
21 | 
22 | show_help() {
23 | 	echo -e "$0 /path/to/app args-to-app\tOpens an application."
24 | echo -e "$0 --version\t\tPrints version info." 25 | } 26 | 27 | show_version() { 28 | echo -e "$0 version 0.0.0, An example script which shows the concept of cross-platform .app bundling." 29 | echo -e "Copyright (c) 2014 Arthur Wang " 30 | echo -e "This Shell script is released under the terms of the GNU General Public License," 31 | echo -e "version 2 or or (at your option) any later version." 32 | } 33 | 34 | die_hard() { 35 | echo -e "ERROR: $1" >&2 36 | echo "More information might be available at:" >&2 37 | echo " Use the source, Luke." >&2 38 | [ "$2" ] && exit_code=$2 || exit_code=1 39 | exit $exit_code 40 | } 41 | 42 | # Wait a minute while I figure out how libplist-utils work. 43 | 44 | if [ "$1" == "" ]; then show_help >&2; exit 1; fi 45 | if [ "$1" == "--version" ]; then show_version; exit 0; fi 46 | if [ "$1" == "--help]" ]; then show_help; exit 0; fi 47 | 48 | if [ -d $1/Contents/Resources ] 49 | then 50 | cd $1/Contents/Resources || die_hard "Permission Denied: cannot chdir into bundle." 51 | ( file ../Info.plist | grep binary ) && (mv ../Info.plist ../Info.plist.bak && plistutil -i ../Info.plist.bak -o ../Info.plist || die_hard "cannot convert plist.\nDo you have plistutil, bro?") || true 52 | shift 53 | # Try one-by-one, using for 54 | # (How can we know it works?) 55 | # Else use darling. 56 | ../${OS}/$(xml2 < ../Info.plist | grep -A1 CFBundleExecutable | tail -n 1 | cut -f 2 -d "=") $* 57 | else die_hard "Invalid .app bundle!" 58 | fi 59 | 60 | # -*- vim:fenc=utf-8:shiftwidth=2:softtabstop=2:autoindent 61 | -------------------------------------------------------------------------------- /OSX-like-bundle/README.md: -------------------------------------------------------------------------------- 1 | Cross-platform .app bundle 2 | ==== 3 | 4 | **DO NOT WORK ON THIS THING**. GnuStep has a [multiarch bundle format](https://fedoraproject.org/wiki/PackagingDrafts/GNUstep#.22Fat.22_unflattened_layout) 5 | that already does exactly what I want to do. It's been there for a long time. 6 | 7 | I might be drunk when I wrote this. 8 | 9 | The .app directory will has a layout almost the same to the one OS X .app 10 | bundle has, but with a ${OS}/ (OS\_ARCH) folder instead of the MacOS/ 11 | folder. Of course it can support a fat bundle with MacOS/ and many other Arches. 12 | 13 | Dependencies 14 | --- 15 | This script Depends on: 16 | - libplist-utils (plistutil) 17 | - xml2 18 | - - libxml/libxml2 19 | - bash 20 | 21 | Why do we need the bundle? 22 | --- 23 | Bundling often makes distributing applications easier. In OS X, bundling 24 | is also an important solution to provide built-in application icons and so 25 | on. 26 | 27 | But why are we using the OS X bundle format? 28 | --- 29 | The current OS X format isn't really platform-specific. Of course it can be 30 | simply extended, just like what I am doing now. 31 | 32 | 33 | So...How will it be like? 34 | --- 35 | Here is a directory tree of an example bundle with multi-platform support: 36 |
37 | MyApp.app/
38 |   Contents/
39 |     Info.plist
40 |     MacOS/
41 |       MyApp
42 |     Linux_i386/
43 |       MyApp
44 |       Frameworks/
45 |         (Linux_i386 libs)
46 |     Linux_x86_64/
47 |       MyApp
48 |       Frameworks/
49 |         (Linux_x86_64 libs)
50 |     DotNET/ (I'm kidding)
51 |       MyApp.exe
52 |       App_Sup.dll
53 |     Resources/
54 |       foo.tiff
55 |       bar.lproj/
56 |     Frameworks/
57 |       (OS X libs)
58 |     Frameworks_common/
59 |     PlugIns/
60 |       (OS X PlugIns)
61 |     PlugIns_common/
62 |     SharedSupport/
63 | </pre>
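
In practice, launching from such a bundle boils down to reading `CFBundleExecutable` out of `Info.plist` and prefixing the arch folder. A minimal sketch of that lookup, reusing the same `plistutil`/`xml2` pipeline as ExampleAppOpener.sh (`MyApp.app` and `Linux_x86_64` are placeholder names):

```bash
#!/bin/bash
# Sketch only; assumes plistutil and xml2 are installed.
app=MyApp.app os=Linux_x86_64
# Convert a binary Info.plist to XML first, as the opener script does.
if file "$app/Contents/Info.plist" | grep -q binary; then
    mv "$app/Contents/Info.plist" "$app/Contents/Info.plist.bak"
    plistutil -i "$app/Contents/Info.plist.bak" -o "$app/Contents/Info.plist"
fi
# xml2 flattens the plist into path=value lines; the line after the
# CFBundleExecutable key holds the executable's name.
exe=$(xml2 < "$app/Contents/Info.plist" | grep -A1 CFBundleExecutable | tail -n 1 | cut -f 2 -d '=')
exec "$app/Contents/$os/$exe" "$@"
```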
64 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | scriptlets
2 | ==========
3 | 
4 | AOSC Scriptlets.
--------------------------------------------------------------------------------
/abbs-ci/bash-check.py:
--------------------------------------------------------------------------------
1 | import bashvar
2 | import sys
3 | import pyparsing as pp
4 | from pathlib import Path
5 | import warnings
6 | 
7 | warnings.simplefilter("ignore")
8 | 
9 | failed = False
10 | 
11 | def lint_file(filepath):
12 |     global failed
13 |     whole_file = open(filepath, 'r').read()
14 | 
15 |     try:
16 |         _ = bashvar.eval_bashvar_literal(whole_file)
17 |     except pp.ParseException as e:
18 |         lineno = str(e.lineno)
19 |         print(filepath + ':' + lineno)
20 |         print('\t' + e.line)
21 |         print()
22 |         failed = True
23 | 
24 | tree = Path(sys.argv[1])
25 | 
26 | for category in tree.iterdir():
27 |     if not category.is_dir() or category.name == 'groups':
28 |         continue
29 |     for package in category.iterdir():
30 |         if not package.is_dir():
31 |             continue
32 | 
33 |         spec = package.joinpath('spec')
34 |         if spec.exists():
35 |             lint_file(str(spec))
36 | 
37 |         defines = package.joinpath('autobuild', 'defines')
38 |         if defines.exists():
39 |             lint_file(str(defines))
40 | 
41 |         for sub_package in package.iterdir():
42 |             if not sub_package.is_dir() or sub_package.name == 'autobuild':
43 |                 continue
44 |             defines = sub_package.joinpath('autobuild', 'defines')
45 |             if defines.exists():
46 |                 lint_file(str(defines))
47 | 
48 | exit(1 if failed else 0)
--------------------------------------------------------------------------------
/aosa-report/.gitignore:
--------------------------------------------------------------------------------
1 | /venv
2 | /.mypy_cache
--------------------------------------------------------------------------------
/aosa-report/README.md:
--------------------------------------------------------------------------------
1 | ## AOSA Report Generator
2 | 
3 | ### How to "install"
4 | 
5 | First, create a Python 3 venv: `python3 -m venv venv`
6 | 
7 | Now activate it: if you are using Bash or Zsh, run `source venv/bin/activate`; for fish users, run `source venv/bin/activate.fish`.
8 | 
9 | Then install the dependencies: `pip install -r requirements.txt`
10 | 
11 | ### How to set up
12 | 
13 | 1. Open `reporter.py` with your favorite text/code editor
14 | 1. Navigate to line 11, where you will see `AFTER_DATE=...`; change the date value to the start of the reporting cycle (issues *after* this date will be collected)
15 | 1. Save and close the file
16 | 1. You now need to prepare your GitHub access token. This is required, as the anonymous API quota runs out very quickly. It's recommended to create a new token for this specific script; a token without any permission scope should do the job.
17 | 
18 | ### Usage
19 | 
20 | After setting up the script, you can now run it with `TOKEN=<your token> python3 reporter.py > generated.txt`. The result will be stored in `generated.txt`; also take note of any warnings issued during the run.
21 | 
22 | You need to pay extra attention to issues printed out during the run when reviewing the generated bulletin.
23 | 
24 | Also, if you want to strip out all the Markdown elements in the generated file, your best bet is Pandoc. Here is an example of how you may accomplish the job: `pandoc -f markdown -t plain --wrap=none generated.txt -o filtered.txt`.
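
Putting the steps above together, an end-to-end session might look like this (a sketch; `YOUR_TOKEN` is a placeholder, and the venv is assumed to already exist):

```bash
source venv/bin/activate            # activate the venv created earlier
pip install -r requirements.txt     # install the dependencies
TOKEN=YOUR_TOKEN python3 reporter.py > generated.txt
# Optionally strip the Markdown from the result:
pandoc -f markdown -t plain --wrap=none generated.txt -o filtered.txt
```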
25 | 
26 | 
--------------------------------------------------------------------------------
/aosa-report/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi==2019.9.11
2 | chardet==3.0.4
3 | idna==2.8
4 | requests==2.22.0
5 | six==1.12.0
6 | urllib3==1.25.8
7 | webencodings==0.5.1
8 | PyGithub==1.44
--------------------------------------------------------------------------------
/aosc-build-analyzer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This analyzer tries to create the appropriate flattened ab3 notation of a
3 | # given build script.
4 | # This is a dump of _misaka_base_brain_tmp_prog.
5 | 
6 | # We export the special build steps as functions that record the calls, and then
7 | # use a ( subshell ) to let the build script touch the functions.
8 | 
9 | STEP=0
10 | export PKGDIR='#{PKGDIR}' SRCDIR='#{SRCDIR}'
11 | # BUILD STEPS
12 | # ===========
13 | #
14 | # 0 build_start
15 | # 1 [configure]
16 | # 2 build_ready
17 | # 3 [make]
18 | # 4 build_final
19 | # 5 [make install]
20 | # 6 beyond
21 | 
22 | shopt -s extglob expand_aliases
23 | 
24 | export STATES="$(mktemp -d)"
25 | 
26 | # Sources from the stdin.
27 | __source_stdin(){
28 | 	local __tmpf="$(mktemp)"
29 | 	echo "$(<&0)" > "$__tmpf"
30 | 	. "$__tmpf" "$@"
31 | 	rm "$__tmpf"
32 | }
33 | 
34 | # Note that aliases are expanded as the lines are read.
35 | alias __recordlineno='
36 | echo -n "${BASH_LINENO[${__LINENO_OVERRIDE:--2}]} $__detected_type $__anchor_type $__anchor_name $(printf '%q ' "$@")" | tee "$STATES/log" >> "$STATES"/parse
37 | '
38 | 
39 | ./configure(){
40 | 	__detected_type=autotools
41 | 	__anchor_type=configure
42 | 	local __anchor_name="${__anchor_name-./configure}"
43 | 	__recordlineno
44 | 	exit 0
45 | }
46 | 
47 | cmake(){
48 | 	__detected_type=cmake
49 | 	__anchor_type=configure
50 | 	local __anchor_name="${__anchor_name-cmake}"
51 | 	__recordlineno
52 | 	exit 0
53 | }
54 | 
55 | qmake(){
56 | 	__detected_type=qtproject
57 | 	__anchor_type=configure
58 | 	local __anchor_name="${__anchor_name-qmake}"
59 | 	__recordlineno
60 | 	exit 0
61 | }
62 | 
63 | perl(){
64 | 	[[ "$1" == Makefile* ]] || return 0
65 | }
66 | 
67 | python2(){
68 | 	case "$1" in
69 | 	(build) ;;
70 | 	(install) ;;
71 | 	esac
72 | }
73 | alias python3='__anchor_name=python3 python2'
74 | 
75 | make(){
76 | 	:
77 | }
78 | 
79 | command_not_found_handler(){
80 | 	local __LINENO_OVERRIDE=-3
81 | 	case "$1" in
82 | 	(*/configure)
83 | 		echo "Resolved command-not-found -> configure" | tee "$STATES/log"
84 | 		__anchor_name="$1" ./configure "$@";;
85 | 	esac
86 | }
87 | 
88 | curscript=build
89 | while [ -e "$curscript" ]; do
90 | 	# Invoke the currently-cut script
91 | 	# Read the last line of $STATE/parse and segment it into the temp dir
92 | 	: # If there is still something left, go on.
93 | done
94 | 
95 | exec 4<>"$STATES"/parse
96 | while IFS=$'\t' read -u 4 line type name args; do
97 | 	: # Do deeper considerations, combine the type guesses
98 | done
99 | 
100 | # rm -r "$STATES"
--------------------------------------------------------------------------------
/aosc-git-wiki.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # aosc-git-wiki.sh: gh wiki utils
3 | # Converts from/to dir layouts and flat GitHub wiki layout.
4 | # A special line should be used. Otherwise we would guess it.
5 | shopt -s extglob globstar || exit 2
6 | : ${ghwiki=.githubwiki} ${docdir=doc}
7 | die(){ echo "$1"; exit "${2-1}"; }
8 | [ -e .git ] || die "Git will fail without .git"
9 | 
10 | declare -A remotes
11 | 
12 | update_remote(){
13 | 	local IFS=$'\t' name path
14 | 	while read -r name path; do remotes["$name"]="${path% (*)}"; done < <(git remote -v)
15 | }
16 | 
17 | update_remote
18 | 
19 | github_origin(){
20 | 	echo "${remotes[github]:-${remotes[origin]}}"
21 | }
22 | 
23 | github_wikidir(){
24 | 	echo "${1%%.git}.wiki"
25 | }
26 | 
27 | if [ ! -d "$ghwiki" ]; then
28 | 	[ -e "$ghwiki" ] && die "$ghwiki/ Wrong type.."
29 | 	git submodule add "$(github_wikidir "$(github_origin)")" || die WTF
30 | fi
31 | 
32 | if [ ! -d "$docdir" ]; then
33 | 	[ -e "$docdir" ] && die "$docdir/ Wrong type.."
34 | 	mkdir -p "$docdir"
35 | fi
36 | 
37 | get_mark(){
38 | 	local k="$(tail -n 1 "$1")" || return $?
39 | 	[[ "$k" != \*}"
42 | }
43 | 
44 | collapse(){
45 | 	cd "$docdir"
46 | 	local IFS=$'\n'
47 | 	# TODO: pandoc conversion: We write in pandoc md and convert to GH md.
48 | 	to_github $(find . -name '*.md')
49 | 	cd -
50 | }
51 | 
52 | to_github(){
53 | 	local i temp IFS=$' \t\n' temp2
54 | 	for i; do
55 | 	title=''
56 | 	local "$(get_mark "$i")"
57 | 	if [ -z "$title" ]; then
58 | 		# Guess it
59 | 		while read temp; do case "$temp" in
60 | 			(\#*) title="${temp/\#?( )}"; break;;
61 | 			([A-Za-z]*) title="$temp" temp2='wait-hr';;
62 | 			(====*) [[ "$temp2" == wait-hr ]] && break;;
63 | 		esac
64 | 		done < <(head "$i")
65 | 	fi
66 | 	# I believe that I should look at the Filename then.
67 | 	[ "$title" ] || ! echo "Failed to eat $i since we don't know the title.">&2 || continue
68 | 	# And we should do a mv here.
69 | 	# And echo >> when there is no such tag or some info is missing.
70 | 	done
71 | }
72 | 
73 | from_github(){
74 | 	: # The reverse
75 | }
76 | 
--------------------------------------------------------------------------------
/aosc-os-dpkg2rpm:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 | # Super crude and simple for now, just to show the basic ideas of
3 | # how I actually built RPM builds of AOSC OS - and from what I can
4 | # see so far, this method works just fine.
5 | ##@author 2015 Mingcong Bai (JeffBai) .
6 | ##@author 2015 Mingye Wang (Arthur2e5) .
7 | ##@author 2013 Mikko Rantalainen . (error handler)
8 | ##@copyright MIT-1.0
9 | 
10 | ##Error handler by Mikko Rantalainen.
11 | on_error() {
12 | 	local parent_lineno="$1"
13 | 	local message="$3"
14 | 	local code="$2"
15 | 	if [[ -n "$message" ]] ; then
16 | 		echo "Error on or near line ${parent_lineno}: ${message}; Code ${code}"
17 | 	else
18 | 		echo "Error on or near line ${parent_lineno}; Code ${code}"
19 | 	fi >&2
20 | 	echo "This error is triggered by \`bash -e' in the script; to override it, run \`bash $0' instead."
21 | 	exit $code
22 | }
23 | trap 'on_error ${LINENO} $?' ERR
24 | 
25 | REPO=https://repo.anthonos.org
26 | # Get needed tools for RPM conversion
27 | apt update
28 | apt install zypper --yes
29 | 
30 | # Remove PackageKit as it is not supported on RPM for now
31 | apt purge packagekit gnome-packagekit apper muon-explorer --yes
32 | 
33 | # Get the list of DPKG packages with "Installed" state, specifically:
34 | # Includes Zypper at this point, of course...
35 | dpkg -l | grep ^ii | awk '{print $2}' > /run/aosc-dpkg-list
36 | 
37 | # Configure Zypper repositories
38 | zypper ar "$REPO/os3-next/os3-rpm" "AOSC OS3"
39 | 
40 | # Install RPM packages
41 | zypper refresh
42 | zypper install $(cat /run/aosc-dpkg-list)
43 | 
44 | # Now purge DPKG and Apt from the system
45 | apt purge apt dpkg --force-yes # Does this skip "Yes, do as I say!"?
46 | 
47 | # And clean up...
48 | rm -rf /var/cache/apt
49 | rm -rf /var/lib/{dpkg,apt,PackageKit}
50 | 
51 | # Boom, and done
52 | echo "Conversion complete!"
53 | 
--------------------------------------------------------------------------------
/aosc-wiki-dumper.py:
--------------------------------------------------------------------------------
1 | """
2 | This script was used to dump the wiki contents of the Wiki.js database.
3 | To use it, just change the "dbname=[db] user=[user]" string below.
4 | The files will be saved to the current working directory.
5 | """
6 | 
7 | import psycopg2
8 | import os
9 | 
10 | QUERY = """
11 | SELECT p.path, p."localeCode", format(E'+++\ntitle = "%s"\ndescription = "%s"\ndate = %s\ntags = %s\n+++\n\n%s', p.title, p.description, trim(both '"' from to_json(p."createdAt")::TEXT), json_agg(t.title), p.content)
12 | FROM pages p, "pageTags" s, tags t
13 | WHERE s."pageId" = P.id AND t.id = s."tagId" GROUP BY p.id;
14 | """
15 | 
16 | conn = psycopg2.connect("dbname=[db] user=[user]")
17 | cur = conn.cursor()
18 | cur.execute(QUERY)
19 | results = cur.fetchall()
20 | for path, locale, content in results:
21 |     if locale:
22 |         path = os.path.join(locale, path)
23 |     os.makedirs(os.path.dirname(path), exist_ok=True)
24 |     with open(path + '.md', 'wt') as f:
25 |         f.write(content)
26 | 
27 | cur.close()
28 | conn.close()
--------------------------------------------------------------------------------
/aoscbootstrap/README.md:
--------------------------------------------------------------------------------
1 | # AOSCBootstrap
2 | 
3 | AOSCBootstrap has been moved to a separate repository. You can find it at https://github.com/AOSC-Dev/aoscbootstrap.
--------------------------------------------------------------------------------
/apt-spy:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | # AOSC apt-spy equivalent. Reads mirror list from http://www.anthonos.org/mirrors.list and performs speed tests.
3 | # apt-spy.conf should be:
4 | # # apt-spy.conf START
5 | # Sources="os2 os2-anthonos" (Something like that)
6 | # Method=[ ping | wget ] (Defines how to perform tests)
7 | # MainURL=http://mirror.anthonos.org/mirrors.list
8 | # # End of apt-spy.conf / DEFAULTCONF.
9 | # -*- vim:fenc=utf-8:shiftwidth=2:softtabstop=2:autoindent
10 | 
11 | # Copyright (C) 2006-2012 Bart Martens # I copied die_hard() from update-flashplugin-nonfree...
12 | # Copyright (C) 2014 Arthur Wang
13 | #
14 | # This program is free software; you can redistribute it and/or modify
15 | # it under the terms of the GNU General Public License version 2 as
16 | # published by the Free Software Foundation.
17 | #
18 | # This program is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 | # GNU General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU General Public License
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 | 
26 | 
27 | 
28 | set -e
29 | 
30 | ap(){ printf '%q ' "$@"; }
31 | vd(){ echo "$1=$(ap $2)"; }
32 | return_0() { return 0; }
33 | trap "return_0" 0
34 | 
35 | die_hard() {
36 | 	echo -e "ERROR: $1" >&2
37 | 	echo "More information might be available at:" >&2
38 | 	echo " https://github.com/...../wiki" >&2
39 | 	exit 1
40 | }
41 | 
42 | ((EUID)) && die_hard "must be root (or hack the script)"
43 | 
44 | show_usage() {
45 | 	echo "Usage:"
46 | 	echo " apt-spy --update"
47 | 	echo " apt-spy --restore"
48 | 	echo "Additional options:"
49 | 	echo " --verbose"
50 | 	echo " --quiet"
51 | 	echo " --ping"
52 | 	echo " --wget"
53 | 	exit 1
54 | }
55 | 
56 | testping() {
57 | 	pingregex='= [^/]*/([0-9]+\.[0-9]+)' # Regular Expression, see Wikipedia. (正则表达式)
58 | 	[[ $(ping -q -c 4 $1) =~ $pingregex ]] && printf '%s' "${BASH_REMATCH[1]}" || printf "fail" # Wikipedia Bash: Regex.
59 | }
60 | 
61 | testwget() {
62 | 	# Needs to be reviewed.
63 | 	# Using time may skip the 404 checking; but using sed seems to be complicated for me.
64 | 	# Of course I am happy to make everything something like a string of numbers giving the time taken.
65 | 	( TIMEFORMAT='%3R' time wget -nv -O /dev/null http://$1/path/to/test/file ) 2>&1 | grep real # | sed -e 's|^.*(\([0-9.]\+ [KM]B/s\)).*$|\1|' || printf "0"
66 | }
67 | 
68 | sort_and_select() {
69 | 	j=0
70 | 	for i in $Mirrors; do
71 | 		((j++))
72 | 		declare "Mirror_$j=$(test${Method} $i)"
73 | 		ref="Mirror_$j"; [ "${!ref}" == "fail" ] && FAILEDMIRRORS="$i $FAILEDMIRRORS" || { TESTEDMIRRORS="$i $TESTEDMIRRORS"; Time="$i $Time"; }
74 | 	done
75 | 	for ((i=0;i /etc/apt-spy.conf
110 | 	echo "Config not found. Writing Defaults."
111 | fi
112 | 
113 | getopt_temp=`getopt -o urpwvq --long update,restore,ping,wget,verbose,quiet -n 'apt-spy' -- "$@"` || show_usage
114 | eval set -- "$getopt_temp" || show_usage
115 | 
116 | while true
117 | do
118 | 	case "$1" in
119 | 	-u|--update)
120 | 		ACTION="update"
121 | 		shift
122 | 		;;
123 | 	-r|--restore)
124 | 		ACTION="restore"
125 | 		shift
126 | 		;;
127 | 	-p|--ping)
128 | 		Method=ping
129 | 		shift
130 | 		;;
131 | 	-w|--wget)
132 | 		Method=wget
133 | 		shift
134 | 		;;
135 | 	-v|--verbose)
136 | 		verbose=yes
137 | 		shift
138 | 		;;
139 | 	-q|--quiet)
140 | 		quiet=yes
141 | 		shift
142 | 		;;
143 | 	--)
144 | 		shift
145 | 		break
146 | 		;;
147 | 	*)
148 | 		echo "Internal error!"
149 | 		exit 1
150 | 		;;
151 | 	esac
152 | done
153 | [ "$ACTION" != "none" -a $# -eq 0 ] || show_usage
154 | [ "$verbose" != "yes" ] || echo "options: $getopt_temp"
155 | 
156 | 
157 | case "$ACTION" in
158 | update)
159 | 	mv /etc/apt/sources.list ~/sources.list.bak # Do a backup
160 | 	Mirrors="`wget -nv -O - http://mirror.anthonos.org/mirrors.list`" || { { echo "Trying to use local mirror list" && Mirrors="`cat /etc/apt/mirrors.list`"; } || die_hard "AOSC apt-spy can't get a mirror list."; }
161 | 	sort_and_select
162 | 	echo -e "# sources.list generated by AOSC apt-spy.\n# See \`$0 --help\` for more information." > /etc/apt/sources.list || die_hard "Cannot write to sources.list, please check permissions:\n`ls -alh /etc/apt`"
163 | 	savesuccess >> /etc/apt/sources.list
164 | 	commentfail >> /etc/apt/sources.list
165 | 	;;
166 | restore)
167 | 	mv ~/sources.list.bak /etc/apt/sources.list || die_hard "No backup files found. Make sure sources.list.bak is inside your \$HOME directory."
168 | ;; 169 | esac 170 | -------------------------------------------------------------------------------- /bootstrap-ldc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | GDMD_WRAPPER="https://cdn.jsdelivr.net/gh/D-Programming-GDC/gdmd@ff2c97a47408fb71c18a2d453294d18808a97cc5/dmd-script" 4 | TREE_DIR="/tree/extra-dlang/ldc/" 5 | 6 | if [ ! -d /tree ]; then 7 | echo '[!] Must be run from a Ciel container!' 8 | exit 1 9 | fi 10 | 11 | echo '[+] Installing GDMD wrapper for gdc ...' 12 | wget "$GDMD_WRAPPER" -O /usr/bin/gdmd 13 | chmod a+x /usr/bin/gdmd 14 | 15 | echo '[+] Removing ldc ...' 16 | apt-get purge ldc || true 17 | sed -i "s| ldc||" "${TREE_DIR}01-liblphobos/defines" 18 | 19 | echo '[+] Patching LDC building scripts ...' 20 | cat << 'EOF' | perl - 21 | my $filename = '/tree/extra-dlang/ldc/01-liblphobos/build'; 22 | my $regex = qr/cmake \.\..+?ninja/msp; 23 | my $subst = 'cmake .. -GNinja -DD_COMPILER=/usr/bin/gdmd -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr;ninja'; 24 | 25 | my $file_content = do{local(@ARGV,$/)=$filename;<>}; 26 | my $result = $file_content =~ s/$regex/$subst/rg; 27 | open(FH, '>', $filename) or die $!; 28 | 29 | print FH "$result\n"; 30 | EOF 31 | 32 | echo '[+] Bootstrapping LDC ...' 33 | acbs-build ldc 34 | 35 | echo '[+] Restoring Git tree ...' 36 | pushd /tree 37 | git checkout -f 'extra-dlang/ldc/' 38 | popd 39 | rm -v /usr/bin/gdmd 40 | 41 | echo '[+] Re-building LDC with LDC ...' 42 | acbs-build ldc 43 | -------------------------------------------------------------------------------- /breakit/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "breakit" 3 | version = "0.1.0" 4 | edition = "2024" 5 | description = "BreakIt: AOSC OS package rebuild collector" 6 | authors = ["xtex "] 7 | license = "GPL-3.0-or-later" 8 | 9 | [dependencies] 10 | anyhow = "1.0.98" 11 | bytes = "1.10.1" 12 | clap = { version = "4.5.37", features = ["env", "derive"] } 13 | console = "0.15.11" 14 | git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] } 15 | libabbs = "0.1.7" 16 | regex = { version = "1.11.1", default-features = false, features = ["std", "perf"] } 17 | reqwest = { version = "0.12.15", features = ["json"] } 18 | serde = { version = "1.0.219", features = ["derive"] } 19 | tokio = { version = "1.44.2", features = ["full"] } 20 | zstd = "0.13.3" 21 | 22 | [package.metadata.release] 23 | pre-release-commit-message = "{{crate_name}}: release {{version}}" 24 | consolidate-commits = false 25 | allow-branch = ["main"] 26 | sign-commit = true 27 | -------------------------------------------------------------------------------- /breakit/rustfmt.toml: -------------------------------------------------------------------------------- 1 | hard_tabs = true 2 | max_width = 80 3 | -------------------------------------------------------------------------------- /breakit/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashSet, fs, str::FromStr, sync::Arc}; 2 | 3 | use anyhow::{Result, anyhow, bail}; 4 | use libabbs::{ 5 | apml::{ 6 | ApmlContext, 7 | value::{array::StringArray, union::Union}, 8 | }, 9 | tree::{AbbsSourcePackage, AbbsTree}, 10 | }; 11 | use regex::Regex; 12 | use tokio::task::JoinSet; 13 | 14 | mod pkgcontents; 15 | mod pkgsite; 16 | 17 | #[derive(Debug)] 18 | pub struct PkgBreakContext { 19 | pub abbs: AbbsTree, 20 | pub 
http_client: reqwest::Client, 21 | } 22 | 23 | impl PkgBreakContext { 24 | /// Selects a set of packages to be rebuilt. 25 | /// 26 | /// Note that the produced list are not filtered 27 | /// and may include packages that have been dropped 28 | /// from the repository. However, it must never include 29 | /// the trigger package itself. 30 | pub async fn select( 31 | self: &Arc, 32 | package: &AbbsSourcePackage, 33 | kind: &str, 34 | ) -> Result> { 35 | let kind = kind.to_ascii_uppercase(); 36 | 37 | let mut result = HashSet::new(); 38 | let mut jobs = JoinSet::new(); 39 | 40 | let spec = fs::read_to_string(package.join("spec"))?; 41 | let spec_ctx = ApmlContext::eval_source(&spec)?; 42 | let pkgrebuild = spec_ctx 43 | .get(&format!("PKGREBUILD__{kind}")) 44 | .or_else(|| spec_ctx.get("PKGREBUILD")) 45 | .map(|val| StringArray::from(val.as_string())) 46 | .map(|val| { 47 | val.unwrap() 48 | .into_iter() 49 | .map(|dir| Directive::from_str(&dir)) 50 | .collect() 51 | }) 52 | .unwrap_or_else(|| { 53 | if kind == "ABI" { 54 | Ok(vec![ 55 | Directive::PackageDependents(None), 56 | Directive::LibraryDependents(None), 57 | ]) 58 | } else { 59 | Ok(vec![]) 60 | } 61 | })?; 62 | 63 | for directive in pkgrebuild { 64 | // fast-path for pkg directives 65 | if let Directive::Package(pkg) = &directive { 66 | result.insert(pkg.to_string()); 67 | continue; 68 | } 69 | 70 | let ctx = self.clone(); 71 | let package = package.clone(); 72 | jobs.spawn(async move { 73 | ctx.select_directive(&package, &directive).await 74 | }); 75 | } 76 | 77 | while let Some(part) = jobs.join_next().await { 78 | let part = part?; 79 | let part = part?; 80 | result.extend(part); 81 | } 82 | 83 | result.remove(package.name()); 84 | Ok(result) 85 | } 86 | 87 | /// Selects a set of packages to be rebuilt. 88 | pub async fn select_directive( 89 | &self, 90 | package: &AbbsSourcePackage, 91 | directive: &Directive, 92 | ) -> Result> { 93 | match directive { 94 | Directive::LibraryDependents(pkg) => { 95 | let deps = pkgsite::find_deps( 96 | &self.http_client, 97 | &pkg.to_owned() 98 | .unwrap_or_else(|| package.name().to_string()), 99 | false, 100 | ) 101 | .await?; 102 | Ok(deps) 103 | } 104 | Directive::PackageDependents(pkg) => { 105 | let deps = pkgsite::find_deps( 106 | &self.http_client, 107 | &pkg.to_owned() 108 | .unwrap_or_else(|| package.name().to_string()), 109 | true, 110 | ) 111 | .await?; 112 | Ok(deps) 113 | } 114 | Directive::PathPattern(regex) => { 115 | pkgcontents::find_deps(&self.http_client, regex).await 116 | } 117 | Directive::Section(section) => { 118 | let mut result = HashSet::new(); 119 | for package in self.abbs.section_packages(§ion.into())? { 120 | for package in package.subpackages()? { 121 | result.insert(package.name()?); 122 | } 123 | } 124 | Ok(result) 125 | } 126 | Directive::Package(pkg) => Ok(HashSet::from([pkg.clone()])), 127 | Directive::PackagePattern(regex) => { 128 | let mut result = HashSet::new(); 129 | for package in self.abbs.all_packages()? { 130 | for package in package.subpackages()? { 131 | let name = package.name()?; 132 | if regex.is_match(&name) { 133 | result.insert(name); 134 | } 135 | } 136 | } 137 | Ok(result) 138 | } 139 | } 140 | } 141 | } 142 | 143 | /// A PKGREBUILD selector directive. 144 | #[derive(Debug, Clone)] 145 | pub enum Directive { 146 | /// Shared-library dependents. 147 | LibraryDependents(Option), 148 | /// Reverse dependents. 149 | PackageDependents(Option), 150 | /// Packages providing files matching the pattern. 
151 | PathPattern(Regex), 152 | /// Packages in a certain section. 153 | Section(String), 154 | /// A certain package. 155 | Package(String), 156 | /// Packages matching the pattern. 157 | PackagePattern(Regex), 158 | } 159 | 160 | impl FromStr for Directive { 161 | type Err = anyhow::Error; 162 | 163 | fn from_str(s: &str) -> std::result::Result { 164 | let un = Union::try_from(s)?; 165 | match un.tag.as_str() { 166 | "sodep" => Ok(Self::LibraryDependents(un.argument)), 167 | "revdep" => Ok(Self::PackageDependents(un.argument)), 168 | "path" => { 169 | Ok(Self::PathPattern(Regex::new(&un.argument.ok_or_else( 170 | || anyhow!("path directive must have an argument"), 171 | )?)?)) 172 | } 173 | "section" => Ok(Self::Section(un.argument.ok_or_else(|| { 174 | anyhow!("section directive must have an argument") 175 | })?)), 176 | "pkg" => Ok(Self::Package(un.argument.ok_or_else(|| { 177 | anyhow!("pkg directive must have an argument") 178 | })?)), 179 | "pkgpattern" => { 180 | Ok(Self::PackagePattern(Regex::new(&un.argument.ok_or_else( 181 | || anyhow!("pkgpattern directive must have an argument"), 182 | )?)?)) 183 | } 184 | _ => bail!("unsupported tag in PKGREBUILD directive"), 185 | } 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /breakit/src/pkgcontents.rs: -------------------------------------------------------------------------------- 1 | //! Helpers to scan the `Contents` file of a APT repository. 2 | 3 | use core::str; 4 | use std::{collections::HashSet, env}; 5 | 6 | use anyhow::Result; 7 | use bytes::Buf; 8 | use regex::Regex; 9 | 10 | pub(crate) async fn find_deps( 11 | client: &reqwest::Client, 12 | pattern: &Regex, 13 | ) -> Result> { 14 | let mut packages = HashSet::new(); 15 | let repo_base = env::var("BREAKIT_REPO") 16 | .unwrap_or_else(|_| "https://repo.aosc.io".into()); 17 | 18 | for arch in ["all", "amd64", "arm64"] { 19 | // TODO: cache Contents file 20 | let resp = client 21 | .execute( 22 | client 23 | .get(format!( 24 | "{repo_base}/debs/dists/stable/main/Contents-{arch}.zst" 25 | )) 26 | .build()?, 27 | ) 28 | .await? 29 | .error_for_status()? 30 | .bytes() 31 | .await?; 32 | let resp = zstd::decode_all(resp.reader())?; 33 | let resp = String::from_utf8(resp)?; 34 | for line in resp.lines() { 35 | let line_bytes = line.as_bytes(); 36 | let mut idx = line_bytes.len() - 1; 37 | while idx > 0 && line_bytes[idx] != b' ' { 38 | idx -= 1; 39 | } 40 | if idx == 0 { 41 | continue; 42 | } 43 | let path = str::from_utf8(&line_bytes[0..idx])?.trim_ascii_end(); 44 | if pattern.is_match(path) { 45 | let pkg = str::from_utf8(&line_bytes[idx + 1..])?; 46 | for pkg in pkg.split(',') { 47 | let pkg = pkg.split('/').next_back().unwrap_or(pkg); 48 | packages.insert(pkg.to_string()); 49 | } 50 | } 51 | } 52 | } 53 | 54 | Ok(packages) 55 | } 56 | -------------------------------------------------------------------------------- /breakit/src/pkgsite.rs: -------------------------------------------------------------------------------- 1 | //! AOSC OS packages site APIs. 2 | 3 | use std::collections::HashSet; 4 | 5 | use anyhow::Result; 6 | use serde::Deserialize; 7 | 8 | pub(crate) async fn find_deps( 9 | client: &reqwest::Client, 10 | package: &str, 11 | sodep: bool, 12 | ) -> Result> { 13 | let resp = client 14 | .execute( 15 | client 16 | .get(format!( 17 | "https://packages.aosc.io/revdep/{package}?type=json" 18 | )) 19 | .build()?, 20 | ) 21 | .await? 22 | .error_for_status()? 
23 | .json::() 24 | .await?; 25 | let mut packages = HashSet::new(); 26 | 27 | if !sodep { 28 | for group in resp.revdeps { 29 | for dep in group.deps { 30 | packages.insert(dep.package); 31 | } 32 | } 33 | } else { 34 | for group in resp.sobreaks { 35 | packages.extend(group); 36 | } 37 | packages.extend(resp.sobreaks_circular); 38 | } 39 | 40 | Ok(packages) 41 | } 42 | 43 | #[derive(Debug, Deserialize)] 44 | struct PackageJson { 45 | #[serde(default)] 46 | revdeps: Vec, 47 | #[serde(default)] 48 | sobreaks: Vec>, 49 | #[serde(default)] 50 | sobreaks_circular: Vec, 51 | } 52 | 53 | #[derive(Debug, Deserialize)] 54 | struct DependencyGroup { 55 | deps: Vec, 56 | } 57 | 58 | #[derive(Debug, Deserialize)] 59 | struct Dependency { 60 | package: String, 61 | } 62 | -------------------------------------------------------------------------------- /build-cross-rustc.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Usage information. 4 | _help_message() { 5 | printf "\ 6 | Usage: 7 | 8 | $0 AOSC_ARCH 9 | 10 | - AOSC_ARCH: AOSC OS architecture (amd64, arm64, loongson3, etc.). 11 | 12 | " 13 | } 14 | 15 | # Preformatted echo. 16 | abwarn() { echo -e "[\e[33mWARN\e[0m]: \e[1m$*\e[0m"; } 17 | aberr() { echo -e "[\e[31mERROR\e[0m]: \e[1m$*\e[0m"; exit 1; } 18 | abinfo() { echo -e "[\e[96mINFO\e[0m]: \e[1m$*\e[0m"; } 19 | abdbg() { echo -e "[\e[32mDEBUG\e[0m]: \e[1m$*\e[0m"; } 20 | 21 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 22 | _help_message 23 | exit 0 24 | fi 25 | 26 | if [ -z "$1" ]; then 27 | aberr "Please specify a target architecture in AOSC OS format." 28 | _help_message 29 | exit 1 30 | fi 31 | 32 | # Triple map. 33 | case $1 in 34 | amd64) 35 | CHOST="amd64-aosc-linux-gnu" 36 | ;; 37 | arm64) 38 | CHOST="aarch64-aosc-linux-gnu" 39 | ;; 40 | armv4) 41 | CHOST="arm-aosc-linux-gnueabi" 42 | ;; 43 | armv6hf) 44 | CHOST="arm-aosc-linux-gnueabihf" 45 | ;; 46 | armv7hf) 47 | CHOST="arm-aosc-linux-gnueabihf" 48 | ;; 49 | i486) 50 | CHOST="i486-aosc-linux-gnu" 51 | ;; 52 | loongson2f) 53 | CHOST="mips64el-aosc-linux-gnuabi64" 54 | ;; 55 | loongson3) 56 | CHOST="mips64el-aosc-linux-gnuabi64" 57 | RUSTFLAGS="${RUSTFLAGS} -Clink-args=-fuse-ld=bfd" 58 | ;; 59 | loongarch64) 60 | CHOST="loongarch64-aosc-linux-gnu" 61 | ;; 62 | m68k) 63 | CHOST="m68k-aosc-linux-gnu" 64 | ;; 65 | mips32r6el) 66 | CHOST="mipsisa32r6el-aosc-linux-gnu" 67 | RUSTFLAGS='-Clink-arg=-latomic --cap-lints allow' 68 | ;; 69 | mips64r6el) 70 | CHOST="mipsisa64r6el-aosc-linux-gnuabi64" 71 | RUSTFLAGS="${RUSTFLAGS} -Clink-args=-fuse-ld=bfd" 72 | ;; 73 | powerpc) 74 | CHOST="powerpc-aosc-linux-gnu" 75 | ;; 76 | ppc64) 77 | CHOST="powerpc64-aosc-linux-gnu" 78 | ;; 79 | ppc64el) 80 | CHOST="powerpc64le-aosc-linux-gnu" 81 | ;; 82 | riscv64) 83 | CHOST="riscv64-aosc-linux-gnu" 84 | ;; 85 | alpha) 86 | CHOST="alpha-aosc-linux-gnu" 87 | ;; 88 | esac 89 | 90 | # Generate Rust architecture from triple. 91 | RARCH="${CHOST%%-*}" 92 | 93 | # Generate LLVM triple. 94 | RHOST="${CHOST/aosc/unknown}" 95 | # Generate LLVM tuple for variables. 96 | RHOST_ENV="${RHOST//\-/_}" 97 | RHOST_ENV="${RHOST_ENV^^}" 98 | 99 | abinfo "Applying supplied patches ..." 100 | shopt -s nullglob 101 | if [[ -n $(echo *.patch) ]]; then 102 | for i in *.patch; do 103 | abinfo "... $i ..." 104 | patch -Np1 -i $i || \ 105 | aberr "Failed to apply patch $i: $?" 106 | done 107 | fi 108 | 109 | abinfo "Generating config.toml ..." 
110 | cat > config.toml <> config.toml < rustc-bootstrap-$(cat version | cut -f1 -d' ')-$1.tar.xz || \ 153 | aberr "Failed to build cross Rust tarball for $1 ($CHOST): $?" 154 | 155 | abinfo "Generating checksums for the cross Rust tarball ($1, $CHOST) ..." 156 | sha256sum rustc-bootstrap-$(cat version | cut -f1 -d' ')-$1.tar.xz \ 157 | > rustc-bootstrap-$(cat version | cut -f1 -d' ')-$1.tar.xz.sha256sum || \ 158 | aberr "Failed to generate checksums for the cross Rust tarball ($1, $CHOST): $?" 159 | 160 | abinfo "Build complete, cross Rust tarball ($1, $CHOST) available at:" 161 | echo -e " 162 | $PWD/rustc-bootstrap-$(cat version | cut -f1 -d' ')-$1.tar.xz 163 | " 164 | -------------------------------------------------------------------------------- /buildbot-benchmark/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /buildbot-benchmark/README.md: -------------------------------------------------------------------------------- 1 | buildbot-benchmark.bash 2 | ======================= 3 | 4 | Moved to [AOSC-Dev/buildbot-benchmark](https://github.com/AOSC-Dev/buildbot-benchmark). 5 | -------------------------------------------------------------------------------- /buildlog/buildlog: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0 3 | set -Eeuo pipefail 4 | trap 'errorHandler "$?" "${FUNCNAME[0]}" "$LINENO"' ERR 5 | 6 | error() { 7 | echo -e "\e[0;31m[ERROR] $*\e[0m" >&2 8 | } 9 | 10 | die() { 11 | error "$*" 12 | exit 1 13 | } 14 | 15 | log() { 16 | echo -e "\e[0;32m$*\e[0m" >&2 17 | } 18 | 19 | errorHandler() { 20 | echo -e "\e[0;31m[BUG] Line $3 ($2): $1\e[0m" >&2 21 | exit "$1" 22 | } 23 | 24 | if [[ $# != 1 ]]; then 25 | cat < 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /bump-rel/README.md: -------------------------------------------------------------------------------- 1 | # bump-rel 2 | 3 | ## Usage 4 | 5 | ``` 6 | for i in $(cat groups/group); do bump-rel $i/spec; done 7 | ``` 8 | -------------------------------------------------------------------------------- /bump-rel/bump-rel: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # A simple script to "bump REL". 3 | # Do whatever the fuck you want with this script. 4 | # 5 | # Usage: bump-rel path/to/spec/file 6 | 7 | # Gotta know how to use it eh? 8 | if [ -z "$1" ]; then 9 | echo "Usage: bump-rel path/to/spec/file" 10 | exit 1 11 | fi 12 | 13 | # Just to be sure. 
14 | unset REL 15 | 16 | # If a file does not contain line containing REL= on the line beginning, 17 | # then append 'REL=1'; otherwise, bump REL. 18 | # 19 | # FIXME: does not give two f*cks about duplicate REL=, not pretty, but 20 | # practically, nothing can really go wrong - or can it? 21 | if ! grep -E '^REL=' "$1" >/dev/null; then 22 | echo 'REL=1' >>"$1" 23 | else 24 | # shellcheck source=/dev/null 25 | source "$PWD"/"$1" 26 | NEWREL=$(($REL + 1)) 27 | sed -e "s|REL=.*|REL=$NEWREL|g" -i "$1" 28 | fi 29 | -------------------------------------------------------------------------------- /chkupdate-anitya-checker.rkt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env racket 2 | 3 | ;; Copyright 2025 Kaiyang Wu 4 | ;; 5 | ;; Permission is hereby granted, free of charge, to any person obtaining a copy 6 | ;; of this software and associated documentation files (the “Software”), to 7 | ;; deal in the Software without restriction, including without limitation the 8 | ;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 9 | ;; sell copies of the Software, and to permit persons to whom the Software is 10 | ;; furnished to do so, subject to the following conditions: 11 | ;; 12 | ;; The above copyright notice and this permission notice shall be included in 13 | ;; all copies or substantial portions of the Software. 14 | ;; 15 | ;; THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | ;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | ;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | ;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | ;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 | ;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 | ;; IN THE SOFTWARE. 22 | 23 | ;; Prerequisites on AOSC OS 24 | ;; 25 | ;; `oma install racket` 26 | ;; 27 | ;; Usage 28 | ;; 29 | ;; Run inside an abbs tree. It'll report potential duplicate anitya ids and some 30 | ;; warnings 31 | 32 | #lang racket/base 33 | 34 | (require racket/file 35 | racket/match 36 | racket/path 37 | racket/string) 38 | 39 | (define all-specs 40 | (find-files (λ (f) 41 | (and (not (directory-exists? f)) 42 | (equal? "spec" (path->string (file-name-from-path f))))) 43 | (current-directory))) 44 | 45 | (define (check-dup ids specs) 46 | (cond 47 | [(not (null? specs)) 48 | (define lines (file->lines (car specs))) 49 | (define chkupdate-lines 50 | (filter (λ (l) (string-prefix? l "CHKUPDATE=\"anitya")) lines)) 51 | (cond 52 | [(null? chkupdate-lines) 53 | (when (foldl (λ (l acc) (and acc (not (equal? l "DUMMYSRC=1")))) 54 | #t 55 | lines) 56 | (displayln (format "~a: WARN: No CHKUPDATE found" (car specs)))) 57 | (check-dup ids (cdr specs))] 58 | [else 59 | (define chkupdate-line (car chkupdate-lines)) 60 | (with-handlers ([exn:misc:match? 61 | (λ (e) 62 | (displayln (format "~a: WARN: Malformed CHKUPDATE" 63 | (car specs))) 64 | (check-dup ids (cdr specs)))]) 65 | (match-define (regexp #rx"CHKUPDATE=\"anitya::id=(.*)\"" (list _ id)) 66 | chkupdate-line) 67 | (if (hash-has-key? 
ids id) 68 | (check-dup (hash-update ids id (λ (v) (cons (car specs) v))) 69 | (cdr specs)) 70 | (check-dup (hash-set ids id (list (car specs))) (cdr specs))))])] 71 | [else ids])) 72 | 73 | (define id-specs (hash->list (check-dup (hash) all-specs))) 74 | (for ([id-spec id-specs] 75 | #:when 76 | (and (> (length (cdr id-spec)) 1) 77 | (< (length (cdr id-spec)) 5) 78 | (foldl (λ (p acc) 79 | (and acc (not (string-contains? (path->string p) "+32")))) 80 | #t 81 | (cdr id-spec)))) 82 | (displayln (format "ID ~a is used by ~a" 83 | (car id-spec) 84 | (string-join (map path->string (cdr id-spec)))))) 85 | -------------------------------------------------------------------------------- /ciel-batch/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /ciel-batch/README.md: -------------------------------------------------------------------------------- 1 | ciel-batch 2 | ------- 3 | 4 | A simple wrapper script to build a list of packages using Ciel, with build 5 | environments reset for each package. 6 | 7 | Usage: 8 | 9 | ``` 10 | [linux32] ciel-batch opencollada python-2 python-3 11 | ``` 12 | 13 | Or, using a list, for instance: 14 | 15 | ``` 16 | opencollada 17 | python-2 18 | python-3 19 | ``` 20 | 21 | And invoke: 22 | 23 | ``` 24 | [linux32] ciel-batch `cat list` 25 | ``` 26 | -------------------------------------------------------------------------------- /ciel-batch/ciel-batch: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | set -e 3 | for i in "$@"; do 4 | ciel rollback 5 | sleep 3 6 | ciel build "$i" || read -p check 7 | done 8 | -------------------------------------------------------------------------------- /clean-topics/aosc-clean-topics.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=AOSC Topic Cleaning 3 | Wants=network-online.target 4 | After=network-online.target 5 | 6 | [Service] 7 | ExecStart=/usr/bin/python3 main.py /mirror/debs/pool/ 8 | 9 | [Install] 10 | WantedBy=multi-user.target 11 | -------------------------------------------------------------------------------- /clean-topics/main.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import sys 3 | import os 4 | import shutil 5 | from pathlib import PosixPath 6 | 7 | 8 | def collect_all_branches(): 9 | page = 1 10 | branches = [] 11 | while True: 12 | print(f'Reading page {page} ...') 13 | resp = requests.get(f'https://api.github.com/repos/AOSC-Dev/aosc-os-abbs/branches?per_page=100&page={page}', headers={ 14 | 'Authorization': f'bearer {os.environ["GITHUB_TOKEN"]}'}) 15 | resp.raise_for_status() 16 | b = resp.json() 17 | branches.extend(b) 18 | if len(b) == 100: 19 | page += 1 20 | continue 21 | else: 22 | break 23 | return branches 24 | 25 | 26 | def main(): 27 | root_path = PosixPath(sys.argv[1]) 28 | if not root_path.is_dir(): 29 | raise 
Exception(f'{root_path} is not a directory') 30 | topics = os.listdir(root_path) 31 | print('Reading topics list ...') 32 | branches = collect_all_branches() 33 | print('Done reading topics list.') 34 | branches_lookup = set([i['name'] for i in branches]) 35 | print(f'Found {len(branches_lookup)} branches.') 36 | closed = [] 37 | for topic in topics: 38 | if topic == 'stable' or topic.startswith('.') or topic.startswith('bsp-'): 39 | continue 40 | topic_path = root_path.joinpath(topic) 41 | if not topic_path.is_dir(): 42 | continue 43 | if topic not in branches_lookup: 44 | if not topic_path.joinpath('DEPRECATED').is_file(): 45 | with open(topic_path.joinpath('DEPRECATED'), 'wb') as f: 46 | f.write(b'WARNING: This topic will be deleted.\n') 47 | print(f'Warning marker set: {topic}') 48 | continue 49 | closed.append(topic) 50 | for pr in closed: 51 | shutil.rmtree(root_path.joinpath(pr)) 52 | print('Deleted: {}'.format(pr)) 53 | 54 | 55 | if __name__ == "__main__": 56 | main() 57 | -------------------------------------------------------------------------------- /collect-users/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /collect-users/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "collect-users" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | walkdir = "2.5.0" 8 | anyhow = "1" 9 | reqwest = { version = "0.12", features = ["blocking"] } 10 | liblzma = "0.4" 11 | tar = "0.4" 12 | -------------------------------------------------------------------------------- /collect-users/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | env::{args, current_exe}, 3 | fs, 4 | io::Read, 5 | }; 6 | 7 | use anyhow::{anyhow, Context, Result}; 8 | use liblzma::read::XzDecoder; 9 | use reqwest::blocking::ClientBuilder; 10 | use tar::Archive; 11 | use walkdir::WalkDir; 12 | 13 | fn main() -> Result<()> { 14 | let tree = args().nth(1).context(format!( 15 | "Usage: {} TREE_PATH", 16 | current_exe().unwrap().display() 17 | ))?; 18 | 19 | let mut users = vec![]; 20 | sysusers(&mut users, &tree)?; 21 | usergroup(&mut users, &tree)?; 22 | bootstrap(&mut users)?; 23 | 24 | for u in users { 25 | println!("{}", u); 26 | } 27 | 28 | Ok(()) 29 | } 30 | 31 | fn sysusers(users: &mut Vec, tree: &str) -> Result<()> { 32 | for i in WalkDir::new(tree).min_depth(8).max_depth(8) { 33 | let i = i?; 34 | if i.path().to_string_lossy().contains("sysusers.d") 35 | && i.path().extension().is_some_and(|x| x == "conf") 36 | { 37 | let f = fs::read_to_string(i.path())?; 38 | for i in f.lines() { 39 | if i.starts_with('#') { 40 | continue; 41 | } 42 | let user = i.split_ascii_whitespace().nth(1); 43 | if let Some(user) = user { 44 | if !users.contains(&user.to_string()) { 45 | users.push(user.to_string()); 46 | } 47 | } 48 | } 49 | } 50 | } 51 | 52 | Ok(()) 53 | } 54 | 55 | fn usergroup(users: &mut Vec, tree: &str) -> Result<()> { 56 | for i in WalkDir::new(tree).min_depth(4).max_depth(4) { 57 | let i = i?; 58 | if i.path().ends_with("usergroup") { 59 | let f = fs::read_to_string(i.path())?; 60 | let line = f 61 | .trim() 62 | .lines() 63 | .nth(1) 64 | .ok_or(anyhow!("Failed to parse: {}", i.path().display()))?; 65 | let user = line.split_ascii_whitespace().nth(1); 66 | if let Some(user) = user { 67 | if !users.contains(&user.to_string()) { 68 | 
                    users.push(user.to_string());
69 |                 }
70 |             }
71 |         }
72 |     }
73 | 
74 |     Ok(())
75 | }
76 | 
77 | fn bootstrap(users: &mut Vec<String>) -> Result<()> {
78 |     let client = ClientBuilder::new().user_agent("wget").build()?;
79 |     let resp = client.get("https://raw.githubusercontent.com/AOSC-Dev/aoscbootstrap/refs/heads/master/assets/etc-bootstrap.tar.xz")
80 |         .send()?
81 |         .error_for_status()?;
82 | 
83 |     let xz = XzDecoder::new(resp);
84 |     let mut tar = Archive::new(xz);
85 | 
86 |     for file in tar.entries()? {
87 |         let mut f = file?;
88 |         if f.path()?.to_string_lossy() == "etc/passwd" {
89 |             let mut s = String::new();
90 |             f.read_to_string(&mut s)?;
91 |             for i in s.trim().lines() {
92 |                 let (user, _) = i.split_once(':').context("Failed to parse etc/passwd")?;
93 |                 if !users.contains(&user.to_string()) {
94 |                     users.push(user.to_string());
95 |                 }
96 |             }
97 |             break;
98 |         }
99 |     }
100 | 
101 |     Ok(())
102 | }
103 | 
--------------------------------------------------------------------------------
/commit-o-matic/COPYING:
--------------------------------------------------------------------------------
1 | Copyright 2020 Leo Shen
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 | 
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 | 
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 
--------------------------------------------------------------------------------
/commit-o-matic/README.md:
--------------------------------------------------------------------------------
1 | commit-o-matic
2 | --------------
3 | 
4 | A simple script that commits and adds a new-version message for you.
5 | 
6 | ```
7 | Usage:
8 | 
9 | commit-o-matic PACKAGE_GROUP TYPE [MESSAGE]
10 | 
11 | - PACKAGE_GROUP: Path to the list of packages to be committed.
12 | (Example: TREE/groups/plasma)
13 | - TYPE: type of the desired operation (new, update, or bump-rel)
14 | - [MESSAGE]: if TYPE=bump-rel, you need to specify why. Input the reason here.
15 | ```
16 | 
17 | > No Buckling Spring Keyboards Were Harmed in the Making of This Script.
--------------------------------------------------------------------------------
/commit-o-matic/commit-o-matic.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | _help_message() {
3 | printf "\
4 | Usage:
5 | 
6 | commit-o-matic PACKAGE_GROUP TYPE [MESSAGE]
7 | 
8 | - PACKAGE_GROUP: Path to the list of packages to be committed.
9 | (Example: TREE/groups/plasma)
10 | - TYPE: type of the desired operation (new, update, or bump-rel)
11 | - [MESSAGE]: if TYPE=bump-rel, you need to specify why. Input the reason here.
12 | 13 | " 14 | } 15 | 16 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 17 | _help_message 18 | exit 0 19 | fi 20 | 21 | if [ -z "$1" ]; then 22 | echo -e "[!!!] Please specify a package group.\n" 23 | _help_message 24 | exit 1 25 | fi 26 | 27 | if [ -z "$2" ]; then 28 | echo -e "[!!!] Please specify an operation.\n" 29 | _help_message 30 | exit 1 31 | fi 32 | 33 | # Given a list of packages, automatically commit based on the new version number 34 | if [[ $2 == "update" ]]; then 35 | for i in $(cat $1); do 36 | git add --all $i 37 | git commit -m "${i##*/}: update to $(grep "VER=" $i/spec | cut -d "=" -f2)" 38 | done 39 | elif [[ $2 == "new" ]]; then 40 | for i in $(cat $1); do 41 | git add --all $i 42 | git commit -m "${i##*/}: new, $(grep "VER=" $i/spec | cut -d "=" -f2)" 43 | done 44 | elif [[ $2 == "bump-rel" ]]; then 45 | if [ -z "$3" ]; then 46 | echo -e "[!!!] Need a reason for revision." 47 | _help_message 48 | exit 1 49 | fi 50 | 51 | for i in $(cat $1); do 52 | git add --all $i 53 | git commit -m "${i##*/}: $3" 54 | done 55 | fi 56 | -------------------------------------------------------------------------------- /commitpacs/commitpacs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0 3 | set -Eeuo pipefail 4 | trap 'errorHandler "$?" "${FUNCNAME[0]}" "$LINENO"' ERR 5 | 6 | error() { 7 | echo -e "\e[0;31m[ERROR] $*\e[0m" >&2 8 | } 9 | 10 | die() { 11 | error "$*" 12 | exit 1 13 | } 14 | 15 | log() { 16 | echo -e "\e[0;32m$*\e[0m" >&2 17 | } 18 | 19 | errorHandler() { 20 | echo -e "\e[0;31m[BUG] Line $3 ($2): $1\e[0m" >&2 21 | exit "$1" 22 | } 23 | 24 | getPkgDir() { 25 | local path 26 | path="$(find . -mindepth 2 -maxdepth 2 -type d -name "$1" -print -quit)" 27 | echo "${path#./}" 28 | } 29 | 30 | getPkgVer() { 31 | ( 32 | # shellcheck source=/dev/null 33 | source "$1"/spec 34 | echo "$VER" 35 | ) 36 | } 37 | 38 | commitPkg() { 39 | local pkg="$1" 40 | local pkgDir pkgVer 41 | pkgDir="$(getPkgDir "$pkg")" 42 | pkgVer="$(getPkgVer "$pkgDir")" 43 | 44 | if [[ "${BUMPREL:-}" == 1 ]]; then 45 | log "[$pkg] Bumping REL ..." 46 | "$(dirname "$(dirname "$(realpath "$(command -v "$0")")")")/bump-rel/bump-rel" \ 47 | "$pkgDir/spec" 48 | fi 49 | 50 | local message 51 | message="$pkg: ${commitTemplate}" 52 | message="${message//\{PKGNAME\}/${pkg}}" 53 | message="${message//\{PKGVER\}/${pkgVer}}" 54 | 55 | log "[$pkg] $message" 56 | git add "$pkgDir" 57 | git commit -m "$message" -- "$pkgDir" 58 | 59 | local commitLog 60 | commitLog="$(git -c core.abbrev=16 \ 61 | log HEAD \ 62 | --oneline -1 --no-decorate --color=always)" 63 | log "[$pkg] $commitLog" 64 | 65 | log "[$pkg] SUCCESS!" 66 | } 67 | 68 | if [[ $# == 0 ]]; then 69 | cat <<EOF 70 | Usage: $0 <new|update|MESSAGE> [PACKAGES]... 71 | 72 | Commit many packages with a message. 73 | When no packages are given, all changed packages are committed. 
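
Examples (package names are illustrative):
  commitpacs update alsa-lib alsa-utils
  BUMPREL=1 commitpacs 'rebuild for newer toolchain' alsa-plugins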
74 | 75 | Possible environment variables: 76 | BUMPREL: Set to 1 to bump REL before committing 77 | EOF 78 | exit 79 | fi 80 | 81 | declare commitTemplate 82 | case "$1" in 83 | n | new) 84 | # shellcheck disable=SC2016 85 | commitTemplate='new, {PKGVER}' 86 | ;; 87 | u | upd | update) 88 | # shellcheck disable=SC2016 89 | commitTemplate='update to {PKGVER}' 90 | ;; 91 | *) 92 | commitTemplate="$1" 93 | ;; 94 | esac 95 | shift 96 | 97 | commitTemplate="${commitTemplate#\?\: }" 98 | 99 | declare -a pkgs 100 | if [[ $# == 0 ]]; then 101 | readarray -t pkgs < <(git status --porcelain | cut -d' ' -f 3 | cut -d'/' -f2 | sort | uniq) 102 | else 103 | pkgs=("$@") 104 | fi 105 | 106 | for pkg in "${pkgs[@]}"; do 107 | if ! commitPkg "$pkg"; then 108 | error "[$pkg] FAILED" 109 | fi 110 | done 111 | -------------------------------------------------------------------------------- /copypac/copypac: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0 3 | set -Eeuo pipefail 4 | trap 'errorHandler "$?" "${FUNCNAME[0]}" "$LINENO"' ERR 5 | 6 | error() { 7 | echo -e "\e[0;31m[ERROR] $*\e[0m" >&2 8 | } 9 | 10 | die() { 11 | error "$*" 12 | exit 1 13 | } 14 | 15 | log() { 16 | echo -e "\e[0;32m$*\e[0m" >&2 17 | } 18 | 19 | errorHandler() { 20 | echo -e "\e[0;31m[BUG] Line $3 ($2): $1\e[0m" >&2 21 | exit "$1" 22 | } 23 | 24 | getPkgDir() { 25 | local path 26 | path="$(find . -mindepth 2 -maxdepth 2 -type d -name "$1" -print -quit)" 27 | echo "${path#./}" 28 | } 29 | 30 | getPkgVer() { 31 | ( 32 | # shellcheck source=/dev/null 33 | source "$1"/spec 34 | echo "$VER" 35 | ) 36 | } 37 | 38 | copyPackage() { 39 | local fromPac="$1" newPac="$2" 40 | local pkgSection 41 | pkgSection="$(dirname "$(getPkgDir "$1")")" 42 | local pkgVer 43 | pkgVer="$(getPkgVer "$pkgSection/$1")" 44 | 45 | if ! [[ -e "$pkgSection/$fromPac" ]]; then 46 | error "[$1] Source package does not exist" 47 | return 1 48 | fi 49 | if [[ -e "$pkgSection/$newPac" ]]; then 50 | error "[$2] New package already exists" 51 | return 1 52 | fi 53 | 54 | log "[$1 -> $2] Creating package ..." 55 | cp -r "$pkgSection/$fromPac" "$pkgSection/$newPac" 56 | perl -i -pe "s/^PKGNAME=.+\n/PKGNAME=$newPac\n/g" "$pkgSection/$newPac/autobuild/defines" || true 57 | perl -i -pe "s/^PKGEPOCH=\d+\n//g" "$pkgSection/$newPac/autobuild/defines" || true 58 | 59 | log "[$1 -> $2] Committing ..." 60 | git add "$pkgSection/$newPac" 61 | git commit -m "$newPac: new, $pkgVer" \ 62 | --trailer X-AOSC-copypac-from="$fromPac" \ 63 | --trailer X-AOSC-copypac-version="$pkgVer" \ 64 | --trailer X-AOSC-copypac-section="$pkgSection" \ 65 | -- "$pkgSection/$newPac" 66 | 67 | local commitLog 68 | commitLog="$(git -c core.abbrev=16 \ 69 | log HEAD \ 70 | --oneline -1 --no-decorate --color=always)" 71 | log "[$1 -> $2] $commitLog" 72 | 73 | log "[$1 -> $2] SUCCESS" 74 | } 75 | 76 | if [[ $# != 2 ]]; then 77 | cat < 79 | EOF 80 | exit 81 | fi 82 | 83 | if ! copyPackage "$1" "$2"; then 84 | error "[$1 -> $2] FAILED" 85 | fi 86 | -------------------------------------------------------------------------------- /cp-l: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # cp-l: Determines if we are doing the cp job in the same fs. 3 | # If so, add '-l' to cp. 
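# Example with hypothetical paths: if /srv/data/a and /srv/data/b live on the
# same mount point listed in /etc/mtab, 'cp-l -r /srv/data/a /srv/data/b'
# re-invokes cp with --link, hard-linking the tree instead of copying it;
# across different filesystems it behaves as a plain cp.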
4 | # Arguments from cp (GNU coreutils) 8.23, therefore: 5 | # @license GPL-3.0+ 6 | shopt -s extglob 7 | 8 | declare -A mount_state 9 | # initialization: mtab 10 | while IFS=' ' read -r fs mpoint type opts dump pass; do 11 | # hey printf eat the escapes 12 | mpoint="$(printf %b- "${mpoint%/}")" 13 | mpoint="${mpoint%-}" 14 | # hey wut is dis 15 | # [[ $fs == *fs ]] || fs="$(readlink -e "$fs")" 16 | mount_state["$mpoint"]="${fs%/} $type $opts $dump $pass" 17 | done < /etc/mtab 18 | 19 | # argument processing 20 | gopt="$(getopt -o abdfiHlLnPpRrsStTuvxZ \ 21 | --long archive,attributes-only,backup:,copy-contents,force,interactive,link,dereference,no-clobber,no-dereference,\ 22 | preserve:,no-preserve:,parents,recursive,reflink:,remove-destination,sparse:,strip-trailing-slashes,\ 23 | symbolic-link,suffix:,target-directory:,no-target-directory,update,verbose,one-file-system,context:,help,version \ 24 | -n "$0" -- "$@")" || exit $? 25 | eval gopt="($gopt)" 26 | gargc=${#gopt[@]} 27 | 28 | target_is_dir=0 29 | for ((f=0; f (!target_is_dir) )); then 93 | cp "${gopt[@]}" --link -- "${files_link[@]}" 94 | fi && 95 | if ((${#files_norm[@]} > (!target_is_dir) )); then 96 | cp "${gopt[@]}" -- "${files_norm[@]}" 97 | fi 98 | 99 | 100 | -------------------------------------------------------------------------------- /deb-tree/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /deb-tree/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "deb-tree" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [dependencies] 7 | oma-apt = "0.8" 8 | termtree = "0.5.1" 9 | clap = { version = "4", features = ["derive"] } 10 | anyhow = "1" 11 | -------------------------------------------------------------------------------- /deb-tree/src/main.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use clap::Parser; 3 | use oma_apt::{Cache, DepType, Package, new_cache}; 4 | use std::io::{Write, stdout}; 5 | use termtree::Tree; 6 | 7 | #[derive(Debug, Parser)] 8 | struct Args { 9 | /// Package name to query 10 | package: String, 11 | /// Search depth 12 | #[arg(short, long, default_value_t = 5)] 13 | depth: u8, 14 | /// Invert the search (show reverse dependencies) 15 | #[arg(short, long)] 16 | invert: bool, 17 | } 18 | 19 | fn main() -> anyhow::Result<()> { 20 | let Args { 21 | depth: limit_depth, 22 | package: pkg, 23 | invert, 24 | } = Args::parse(); 25 | 26 | let cache = new_cache!().context("Failed to init apt cache")?; 27 | let pkg = cache 28 | .get(&pkg) 29 | .with_context(|| format!("Failed to get package {}", pkg))?; 30 | 31 | if !invert { 32 | writeln!(stdout(), "{}", dep_tree(&pkg, &cache, 1, limit_depth)?).ok(); 33 | } else { 34 | writeln!( 35 | stdout(), 36 | "{}", 37 | reverse_dep_tree(&pkg, &cache, 1, limit_depth) 38 | ) 39 | .ok(); 40 | } 41 | 42 | Ok(()) 43 | } 44 | 45 | fn reverse_dep_tree(pkg: &Package<'_>, cache: &Cache, depth: u8, limit: u8) -> Tree<String> { 46 | let mut res = Tree::new(pkg.name().to_string()); 47 | 48 | let rdep = pkg.rdepends(); 49 | 50 | if depth > limit { 51 | return res; 52 | } 53 | 54 | for (t, deps) in rdep { 55 | if t == &DepType::Depends { 56 | for dep in deps { 57 | let pkg = cache.get(dep.first().name()); 58 | 59 | if let Some(pkg) = pkg { 60 | if pkg.is_installed() { 61 | res.push(reverse_dep_tree(&pkg, cache, depth + 1, limit)); 62 | } 63 | } 
64 | } 65 | } 66 | } 67 | 68 | res 69 | } 70 | 71 | fn dep_tree( 72 | pkg: &Package<'_>, 73 | cache: &Cache, 74 | depth: u8, 75 | limit: u8, 76 | ) -> anyhow::Result<Tree<String>> { 77 | let mut res = Tree::new(pkg.name().to_string()); 78 | 79 | if depth > limit { 80 | return Ok(res); 81 | } 82 | 83 | let cand = pkg 84 | .candidate() 85 | .with_context(|| format!("Failed to get candidate for package {}", pkg.name()))?; 86 | 87 | let deps = cand.dependencies(); 88 | 89 | if let Some(deps) = deps { 90 | for dep in deps { 91 | if let Some(dep) = cache.get(dep.first().name()) { 92 | res.push(dep_tree(&dep, cache, depth + 1, limit)?); 93 | } 94 | } 95 | } 96 | 97 | Ok(res) 98 | } 99 | -------------------------------------------------------------------------------- /debootstrap/README.md: -------------------------------------------------------------------------------- 1 | # Debootstrap recipe for AOSC OS 2 | 3 | ## Usage 4 | 5 | Copy the `aosc` file to `/usr/share/debootstrap/scripts/` and then run `debootstrap` like this: 6 | 7 | ``` 8 | sudo debootstrap --arch=<arch> <branch> <target> <mirror> aosc 9 | ``` 10 | 11 | For example, to install the `stable` branch of AOSC OS for the `amd64` architecture to `/mnt/system`, using `repo.aosc.io` as the repository, you would use: 12 | 13 | ``` 14 | sudo debootstrap --arch=amd64 stable /mnt/system https://repo.aosc.io/debs/ aosc 15 | ``` 16 | -------------------------------------------------------------------------------- /debootstrap/aosc: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mirror_style release 3 | download_style apt 4 | finddebs_style from-indices 5 | variants - buildd fakechroot minbase 6 | default_mirror https://cf-repo.aosc.io/debs 7 | 8 | work_out_debs () { 9 | required='apt gcc-runtime tar xz gnupg' 10 | base='admin-base util-base iptables' 11 | } 12 | 13 | first_stage_install () { 14 | # additional packages from `noarch` 15 | ca_certs="$MIRRORS/pool/stable/main/c/ca-certs_20191125-0_noarch.deb" 16 | ca_certs_deb="/var/cache/apt/archives/$(basename $ca_certs)" 17 | get "$ca_certs" "$TARGET/$ca_certs_deb" "4013ef7fac210bb040c28c32b65a9a5ced8fcaccb9bc04c9cd0e8ace8ad05eec" 295504 18 | echo "ca-certs $ca_certs_deb" >> "$TARGET/debootstrap/debpaths" 19 | 20 | extract 'aosc-aaa' 'ca-certs' 21 | extract $(without "$required grep" 'aosc-aaa') 22 | cp -ar "$TARGET/bin/"* "$TARGET/usr/bin/" 23 | rm -rf "$TARGET/bin/" 24 | ln -s usr/bin "$TARGET/bin" 25 | cp -ar "$TARGET/usr/lib64/"* "$TARGET/usr/lib/" 26 | rm -rf "$TARGET/usr/lib64/" 27 | ln -s lib "$TARGET/usr/lib64" 28 | cp -ar "$TARGET/var/run/"* "$TARGET/run/" 29 | rm -rf "$TARGET/var/run/" 30 | ln -s /run "$TARGET/var/run" 31 | mkdir -p "$TARGET/var/lib/dpkg" 32 | : >"$TARGET/var/lib/dpkg/status" 33 | : >"$TARGET/var/lib/dpkg/available" 34 | setup_etc 35 | setup_devices 36 | } 37 | 38 | second_stage_install () { 39 | in_target /bin/true 40 | 41 | setup_dynamic_devices 42 | curl 'https://repo.aosc.io/aosc-repacks/etc-bootstrap.tar.xz' | tar xJf - -C "$TARGET" 43 | DEBIAN_FRONTEND=noninteractive 44 | DEBCONF_NONINTERACTIVE_SEEN=true 45 | export DEBIAN_FRONTEND DEBCONF_NONINTERACTIVE_SEEN 46 | 47 | info INSTCORE "Installing core packages (precondition)..." 48 | in_target apt-get install -y iptables shadow grep keyutils 49 | in_target bash -c "echo 'root:anthon' | chpasswd" 50 | info INSTCORE "Updating package cache..." 51 | in_target apt-get update 52 | info INSTCORE "Installing core packages..." 
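# Core packages occasionally fail to unpack on the first attempt; the loop
# below retries the installation, running 'apt-get install -f' in between.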
53 | for i in {0..5} 54 | do in_target apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew" install admin-base util-base systemd-base network-base web-base core-base editor-base python-base bash-startup && \ 55 | break || (warning INSTCORE "Failure while unpacking required packages. This will be attempted up to five times."; in_target apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew" install -f) 56 | done 57 | in_target cp -rT /etc/skel /root/ 58 | info BASESUCCESS "Base system installed successfully." 59 | } 60 | -------------------------------------------------------------------------------- /diff-deb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Usage: diff-deb.sh left.deb right.deb 3 | left=$(mktemp /tmp/diff-deb.XXXXXX) 4 | dpkg --contents $1 | awk '!($2=$3=$4=$5="")' > $left 5 | 6 | right=$(mktemp /tmp/diff-deb.XXXXXX) 7 | dpkg --contents $2 | awk '!($2=$3=$4=$5="")' > $right 8 | 9 | diff -u $left $right 10 | -------------------------------------------------------------------------------- /distro-preflight-check/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /distro-preflight-check/README.md: -------------------------------------------------------------------------------- 1 | revoke-leaked-keys.bash 2 | === 3 | 4 | Detect and collect leaked SSH host keys in AOSC OS system media. 5 | 6 | Usage 7 | --- 8 | 9 | On the AOSC repository server: 10 | 11 | ``` 12 | sudo bash revoke-leaked-keys.bash 13 | ``` 14 | 15 | The collected keys will be found in `${PWD}/revoked`, prefixed by the image 16 | filename from which they were found. 17 | -------------------------------------------------------------------------------- /distro-preflight-check/check-leaked-files.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Leaked sensitive file(s) found? 4 | _leaked_file_found='0' 5 | # Leaked sensitive file(s) filename(s). 6 | _leaked_file_name='' 7 | 8 | case "$1" in 9 | *_installer_*) 10 | _media_type='Installer' 11 | 12 | mkdir _$(basename $1){,_sqfs} 13 | mount $1 _$(basename $1) 14 | # Mount all SquashFS. 15 | for j in _$(basename $1)/squashfs/*.squashfs \ 16 | _$(basename $1)/squashfs/layers/*.squashfs; do 17 | mkdir _$(basename $1)_sqfs/$(basename $j) 18 | mount $j _$(basename $1)_sqfs/$(basename $j) 19 | # Assemble a list of sensitive files. 
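# Sensitive here means per-machine identity: SSH host keys and /etc/machine-id
# must be unique to each installation and must never ship in release media.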
20 | for k in _$(basename $1)_sqfs/$(basename $j)/etc/ssh/ssh_host_*_key* \ 21 | _$(basename $1)_sqfs/$(basename $j)/etc/machine-id; do 22 | if [ -e ${k} ]; then 23 | _leaked_file_found='1' 24 | _leaked_file_name+=" $(basename $j):${k//_$(basename $1)_sqfs\/$(basename $j)/}" 25 | fi 26 | done 27 | umount _$(basename $1)_sqfs/$(basename $j) 28 | rm -r _$(basename $1)_sqfs/$(basename $j) 29 | done 30 | # FIXME: Template .squashfs files share some of the same names 31 | # as the layers images. 32 | for j in _$(basename $1)/squashfs/templates/*.squashfs; do 33 | mkdir _$(basename $1)_sqfs/$(basename $j) 34 | mount $j _$(basename $1)_sqfs/$(basename $j) 35 | # Collect a list of sensitive files. 36 | for k in _$(basename $1)_sqfs/$(basename $j)/etc/ssh/ssh_host_*_key* \ 37 | _$(basename $1)_sqfs/$(basename $j)/etc/machine-id; do 38 | if [ -e ${k} ]; then 39 | _leaked_file_found='1' 40 | _leaked_file_name+=" $(basename $j):${k//_$(basename $1)_sqfs\/$(basename $j)/}" 41 | fi 42 | done 43 | umount _$(basename $1)_sqfs/$(basename $j) 44 | rm -r _$(basename $1)_sqfs/$(basename $j) 45 | done 46 | 47 | # FIXME: Weird race conditions causing umount to fail. 48 | sleep 1 49 | 50 | umount _$(basename $1) 51 | rm -r _$(basename $1){,_sqfs} 52 | ;; 53 | *_livekit_*) 54 | _media_type='LiveKit' 55 | 56 | mkdir _$(basename $1){,_rootfs,_sqfs} 57 | mount $1 _$(basename $1) 58 | # iso/LiveOS/squashfs.img is a SquashFS image containing an ext4 image. 59 | mount _$(basename $1)/LiveOS/squashfs.img \ 60 | _$(basename $1)_sqfs 61 | # The ext4 image in question. 62 | mount _$(basename $1)_sqfs/LiveOS/rootfs.img \ 63 | _$(basename $1)_rootfs 64 | # Collect keys. 65 | for k in _$(basename $1)_rootfs/etc/ssh/ssh_host_*_key* \ 66 | _$(basename $1)_rootfs/etc/machine-id; do 67 | if [ -e ${k} ]; then 68 | _leaked_file_found='1' 69 | _leaked_file_name+=" ${k//_$(basename $1)_rootfs/}" 70 | fi 71 | done 72 | 73 | # FIXME: Weird race conditions causing umount to fail. 74 | sleep 1 75 | 76 | umount _$(basename $1)_rootfs 77 | umount _$(basename $1)_sqfs 78 | umount _$(basename $1) 79 | rm -r _$(basename $1){,_rootfs,_sqfs} 80 | ;; 81 | *.squashfs) 82 | _media_type='SquashFS' 83 | 84 | mkdir _$(basename $1) 85 | mount $1 _$(basename $1) 86 | # Collect keys. 87 | for k in _$(basename $1)/etc/ssh/ssh_host_*_key* \ 88 | _$(basename $1)/etc/machine-id; do 89 | if [ -e ${k} ]; then 90 | _leaked_file_found='1' 91 | _leaked_file_name+=" ${k//_$(basename $1)/}" 92 | fi 93 | done 94 | 95 | # FIXME: Weird race conditions causing umount to fail. 96 | sleep 1 97 | 98 | umount _$(basename $1) 99 | rm -r _$(basename $1) 100 | ;; 101 | esac 102 | 103 | if [ "$_leaked_file_found" = '1' ]; then 104 | echo -e "[!!!] Sensitive file(s) found in ${_media_type} image ${1}:\n" 105 | for i in $_leaked_file_name; do 106 | echo " ${i}" 107 | done 108 | exit 1 109 | else 110 | echo "[ooo] No sensitive file found in ${_media_type} image ${1} - congratulations!" 111 | fi 112 | -------------------------------------------------------------------------------- /distro-preflight-check/revoke-leaked-keys.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Create a revoked key store. 4 | mkdir revoked 5 | 6 | # Find all Installer images in our release directories, including temporary stores 7 | # at /mirror/misc/. 
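# Collected keys are stored as revoked/<image-basename>_<key-filename>, so each
# leaked key can be traced back to the image that shipped it.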
8 | for i in /mirror/misc/*installer*.iso \ 9 | /mirror/aosc-os/os-*/installer/*.iso \ 10 | /mirror/aosc-os/os-*/installer/preview/*.iso; do 11 | mkdir $(basename $i){,_sqfs} 12 | mount -v $i $(basename $i) 13 | # Mount all SquashFS. 14 | for j in $(basename $i)/squashfs/*.squashfs \ 15 | $(basename $i)/squashfs/layers/*.squashfs \ 16 | $(basename $i)/squashfs/templates/*.squashfs; do 17 | mkdir $(basename $i)_sqfs/$(basename $j) 18 | mount -v $j $(basename $i)_sqfs/$(basename $j) 19 | # Collect keys. 20 | for k in $(basename $i)_sqfs/$(basename $j)/etc/ssh/ssh_host_*_key*; do 21 | if [ -e ${k} ]; then 22 | cp -v ${k} revoked/$(basename $i)_$(basename $k) 23 | fi 24 | done 25 | done 26 | umount -Rf $(basename $i)_sqfs/*.squashfs 27 | umount -Rf $(basename $i) 28 | rm -r $(basename $i){,_sqfs} 29 | done 30 | 31 | # Same as above for LiveKit. 32 | for i in /mirror/misc/*livekit*.iso \ 33 | /mirror/aosc-os/os-*/livekit/*.iso \ 34 | /mirror/aosc-os/os-*/livekit/preview/*.iso; do 35 | mkdir $(basename $i){,_rootfs,_sqfs} 36 | mount -v $i $(basename $i) 37 | # iso/LiveOS/squashfs.img is a SquashFS image containing an ext4 image. 38 | mount -v $(basename $i)/LiveOS/squashfs.img \ 39 | $(basename $i)_sqfs 40 | # The ext4 image in question. 41 | mount -v $(basename $i)_sqfs/LiveOS/rootfs.img \ 42 | $(basename $i)_rootfs 43 | # Collect keys. 44 | for k in $(basename $i)_rootfs/etc/ssh/ssh_host_*_key*; do 45 | if [ -e ${k} ]; then 46 | cp -v ${k} revoked/$(basename $i)_$(basename $k) 47 | fi 48 | done 49 | umount -Rf $(basename $i)_rootfs 50 | umount -Rf $(basename $i)_sqfs 51 | umount -Rf $(basename $i) 52 | rm -r $(basename $i){,_rootfs,_sqfs} 53 | done 54 | 55 | # Same as above for individual SquashFS images. 56 | for i in /mirror/aosc-os/os-*/*/*.squashfs; do 57 | mkdir $(basename $i) 58 | mount -v $i $(basename $i) 59 | # Collect keys. 60 | for k in $(basename $i)/etc/ssh/ssh_host_*_key*; do 61 | if [ -e ${k} ]; then 62 | cp -v ${k} revoked/$(basename $i)_$(basename $k) 63 | fi 64 | done 65 | umount -Rfv $(basename $i) 66 | rm -r $(basename $i) 67 | done 68 | 69 | if [[ "$(ls revoked | wc -l)" != "0" ]]; then 70 | echo "Found $(ls revoked | wc -l) leaked keys, oops!" 71 | else 72 | echo "No leaked keys found, yay!" 
73 | fi 74 | -------------------------------------------------------------------------------- /electron/0001-v8-fix-build-with-gcc7.patch: -------------------------------------------------------------------------------- 1 | --- a/v8/src/objects-body-descriptors.h 2016-07-28 12:17:37.000000000 -1000 2 | +++ b/v8/src/objects-body-descriptors.h 2017-06-16 01:20:50.026665439 -1000 3 | @@ -99,7 +99,7 @@ 4 | 5 | template 6 | static inline void IterateBody(HeapObject* obj, int object_size) { 7 | - IterateBody(obj); 8 | + IterateBody(obj); 9 | } 10 | }; 11 | 12 | diff -uNr chromium-53.0.2785.143.orig//v8//src/objects.h chromium-53.0.2785.143/v8//src/objects.h 13 | --- a/v8//src/objects.h 2016-09-29 09:05:23.000000000 -1000 14 | +++ b/v8//src/objects.h 2017-06-17 00:04:02.287124975 -1000 15 | @@ -3199,22 +3199,10 @@ 16 | public: 17 | typedef Shape ShapeT; 18 | 19 | - // Wrapper methods 20 | - inline uint32_t Hash(Key key) { 21 | - if (Shape::UsesSeed) { 22 | - return Shape::SeededHash(key, GetHeap()->HashSeed()); 23 | - } else { 24 | - return Shape::Hash(key); 25 | - } 26 | - } 27 | - 28 | - inline uint32_t HashForObject(Key key, Object* object) { 29 | - if (Shape::UsesSeed) { 30 | - return Shape::SeededHashForObject(key, GetHeap()->HashSeed(), object); 31 | - } else { 32 | - return Shape::HashForObject(key, object); 33 | - } 34 | - } 35 | + // Wrapper methods. Defined in src/objects-inl.h 36 | + // to break a cycle with src/heap/heap.h. 37 | + inline uint32_t Hash(Key key); 38 | + inline uint32_t HashForObject(Key key, Object* object); 39 | 40 | // Returns a new HashTable object. 41 | MUST_USE_RESULT static Handle New( 42 | diff -uNr chromium-53.0.2785.143.orig//v8//src/objects-inl.h chromium-53.0.2785.143/v8//src/objects-inl.h 43 | --- a//v8//src/objects-inl.h 2016-09-29 09:05:23.000000000 -1000 44 | +++ b/v8//src/objects-inl.h 2017-06-17 00:03:08.202099672 -1000 45 | @@ -38,6 +38,26 @@ 46 | namespace v8 { 47 | namespace internal { 48 | 49 | +template 50 | +uint32_t HashTable::Hash(Key key) { 51 | + if (Shape::UsesSeed) { 52 | + return Shape::SeededHash(key, GetHeap()->HashSeed()); 53 | + } else { 54 | + return Shape::Hash(key); 55 | + } 56 | +} 57 | + 58 | + 59 | +template 60 | +uint32_t HashTable::HashForObject(Key key, 61 | + Object* object) { 62 | + if (Shape::UsesSeed) { 63 | + return Shape::SeededHashForObject(key, GetHeap()->HashSeed(), object); 64 | + } else { 65 | + return Shape::HashForObject(key, object); 66 | + } 67 | +} 68 | + 69 | PropertyDetails::PropertyDetails(Smi* smi) { 70 | value_ = smi->value(); 71 | } 72 | --- a/v8/src/heap/mark-compact.cc 2016-09-29 09:05:23.000000000 -1000 73 | +++ b/v8/src/heap/mark-compact.cc 2017-06-17 07:19:14.738159288 -1000 74 | @@ -3614,6 +3614,9 @@ 75 | #endif 76 | } 77 | 78 | +#pragma GCC diagnostic push 79 | +#pragma GCC diagnostic warning "-Wattributes" 80 | + 81 | template 82 | class PointerUpdateJobTraits { 83 | public: 84 | @@ -3701,6 +3704,8 @@ 85 | } 86 | }; 87 | 88 | +#pragma GCC diagnostic pop 89 | + 90 | int NumberOfPointerUpdateTasks(int pages) { 91 | if (!FLAG_parallel_pointer_update) return 1; 92 | const int kMaxTasks = 4; 93 | -------------------------------------------------------------------------------- /electron/0002-WebKit-fix-build-with-gcc7-full.patch: -------------------------------------------------------------------------------- 1 | --- a/third_party/WebKit/Source/wtf/LinkedHashSet.h 2017-11-24 02:03:48.045943722 -0500 2 | +++ b/third_party/WebKit/Source/wtf/LinkedHashSet.h 2017-11-24 02:04:34.212508077 -0500 3 | @@ -687,6 
+687,8 @@ 4 | return *this; 5 | } 6 | 7 | +inline void swapAnchor(LinkedHashSetNodeBase& a, LinkedHashSetNodeBase& b); 8 | + 9 | template 10 | inline void LinkedHashSet::swap(LinkedHashSet& other) { 11 | m_impl.swap(other.m_impl); 12 | --- a/third_party/WebKit/Source/platform/graphics/gpu/SharedGpuContext.h.bak 2017-11-24 13:42:38.842944797 -0500 13 | +++ b/third_party/WebKit/Source/platform/graphics/gpu/SharedGpuContext.h 2017-11-24 13:43:06.219524249 -0500 14 | @@ -4,7 +4,7 @@ 15 | 16 | #include "platform/PlatformExport.h" 17 | #include "wtf/ThreadSpecific.h" 18 | - 19 | +#include 20 | #include 21 | 22 | namespace gpu { 23 | -------------------------------------------------------------------------------- /electron/0005-no-bundled-sysroot.patch: -------------------------------------------------------------------------------- 1 | --- a/toolchain.gypi 2 | +++ b/toolchain.gypi 3 | @@ -53,10 +53,10 @@ 4 | 'sysroot%': '<(source_root)/vendor/debian_wheezy_arm-sysroot', 5 | }], 6 | ['target_arch=="ia32"', { 7 | - 'sysroot%': '<(source_root)/vendor/debian_wheezy_i386-sysroot', 8 | + 'sysroot%': '/', 9 | }], 10 | ['target_arch=="x64"', { 11 | - 'sysroot%': '<(source_root)/vendor/debian_wheezy_amd64-sysroot', 12 | + 'sysroot%': '/', 13 | }], 14 | ], 15 | }, 16 | -------------------------------------------------------------------------------- /electron/0006-gn-bootstrap-fix.patch: -------------------------------------------------------------------------------- 1 | # Patch taken from Gentoo. 2 | # Fixes gn bootstrapping error: 3 | # thread_task_runner_handle.cc:(.text+0x84b): undefined reference to `base::ScopedClosureRunner::ScopedClosureRunner 4 | 5 | Index: tools/gn/bootstrap/bootstrap.py 6 | diff --git a/tools/gn/bootstrap/bootstrap.py b/tools/gn/bootstrap/bootstrap.py 7 | index 38cfb117d29c3895291379f00d8dc8c8b0727474..679170e610f8292bcbeb76508fd247d322a69c79 100755 8 | --- a/tools/gn/bootstrap/bootstrap.py 9 | +++ b/tools/gn/bootstrap/bootstrap.py 10 | @@ -385,6 +385,7 @@ def write_gn_ninja(path, root_gen_dir, options): 11 | 'base/base_switches.cc', 12 | 'base/build_time.cc', 13 | 'base/callback_internal.cc', 14 | + 'base/callback_helpers.cc', 15 | 'base/command_line.cc', 16 | 'base/debug/activity_tracker.cc', 17 | 'base/debug/alias.cc', 18 | -------------------------------------------------------------------------------- /electron/0007-libchromiumcontent-settings.patch: -------------------------------------------------------------------------------- 1 | --- a/chromiumcontent/args/static_library.gn 2 | +++ b/chromiumcontent/args/static_library.gn 3 | @@ -2,10 +2,22 @@ 4 | is_electron_build = true 5 | is_component_build = false 6 | is_debug = false 7 | -symbol_level = 2 8 | +symbol_level = 0 9 | enable_nacl = false 10 | enable_widevine = true 11 | proprietary_codecs = true 12 | is_component_ffmpeg = true 13 | ffmpeg_branding = "Chrome" 14 | use_gold = false 15 | +use_clang = false 16 | +clang_use_chrome_plugins = false 17 | +fatal_linker_warnings = false 18 | +treat_warnings_as_errors = false 19 | +fieldtrial_testing_like_official_build = true 20 | +remove_webcore_debug_symbols = true 21 | +link_pulseaudio = true 22 | +linux_use_bundled_binutils = false 23 | +use_cups = true 24 | +use_gconf = false 25 | +use_gtk3 = true 26 | +use_sysroot = false 27 | --- a/chromiumcontent/BUILD.gn 28 | +++ b/chromiumcontent/BUILD.gn 29 | @@ -32,7 +32,7 @@ 30 | ] 31 | 32 | if (is_linux) { 33 | - deps += [ "//chrome/browser/ui/libgtkui:libgtk2ui" ] 34 | + deps += [ "//chrome/browser/ui/libgtkui:libgtk3ui" ] 35 | } 
36 | 37 | if (is_component_build) { 38 | @@ -154,13 +154,13 @@ 39 | static_library("libgtkui") { 40 | complete_static_lib = true 41 | sources = [ 42 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/app_indicator_icon.o", 43 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/app_indicator_icon_menu.o", 44 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/gtk_util.o", 45 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/gtk_status_icon.o", 46 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/menu_util.o", 47 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/skia_utils_gtk.o", 48 | - "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk2ui/unity_service.o", 49 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/app_indicator_icon.o", 50 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/app_indicator_icon_menu.o", 51 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/gtk_util.o", 52 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/gtk_status_icon.o", 53 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/menu_util.o", 54 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/skia_utils_gtk.o", 55 | + "$root_out_dir/obj/chrome/browser/ui/libgtkui/libgtk3ui/unity_service.o", 56 | ] 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /electron/0008-chromium-unset-madv_free.patch: -------------------------------------------------------------------------------- 1 | --- a/third_party/WebKit/Source/wtf/allocator/PageAllocator.cpp 2 | +++ b/third_party/WebKit/Source/wtf/allocator/PageAllocator.cpp 3 | @@ -41,6 +41,11 @@ 4 | #include 5 | #include 6 | 7 | +#if OS(LINUX) && defined(MADV_FREE) 8 | +// Added in Linux 4.5, but it breaks the sandbox. 9 | +#undef MADV_FREE 10 | +#endif 11 | + 12 | #ifndef MADV_FREE 13 | #define MADV_FREE MADV_DONTNEED 14 | #endif 15 | -------------------------------------------------------------------------------- /electron/0009-chromium-use-system-re2.patch: -------------------------------------------------------------------------------- 1 | --- a/build/linux/unbundle/re2.gn 2 | +++ b/build/linux/unbundle/re2.gn 3 | @@ -11,7 +11,6 @@ 4 | "re2/re2.h", 5 | "re2/set.h", 6 | "re2/stringpiece.h", 7 | - "re2/variadic_function.h", 8 | ] 9 | } 10 | 11 | -------------------------------------------------------------------------------- /electron/0010-chromium-use-system-ffmpeg.patch: -------------------------------------------------------------------------------- 1 | --- a/media/ffmpeg/ffmpeg_common.h 2 | +++ b/media/ffmpeg/ffmpeg_common.h 3 | @@ -22,10 +22,6 @@ 4 | 5 | // Include FFmpeg header files. 6 | extern "C" { 7 | -// Disable deprecated features which result in spammy compile warnings. This 8 | -// list of defines must mirror those in the 'defines' section of FFmpeg's 9 | -// BUILD.gn file or the headers below will generate different structures! 10 | -#define FF_API_CONVERGENCE_DURATION 0 11 | // Upstream libavcodec/utils.c still uses the deprecated 12 | // av_dup_packet(), causing deprecation warnings. 13 | // The normal fix for such things is to disable the feature as below, 14 | @@ -39,7 +35,6 @@ 15 | MSVC_PUSH_DISABLE_WARNING(4244); 16 | #include 17 | #include 18 | -#include 19 | #include 20 | #include 21 | #include 22 | --- a/media/filters/ffmpeg_demuxer.cc 23 | +++ b/media/filters/ffmpeg_demuxer.cc 24 | @@ -1198,24 +1198,6 @@ 25 | // If no estimate is found, the stream entry will be kInfiniteDuration. 
26 | std::vector start_time_estimates(format_context->nb_streams, 27 | kInfiniteDuration); 28 | - const AVFormatInternal* internal = format_context->internal; 29 | - if (internal && internal->packet_buffer && 30 | - format_context->start_time != static_cast(AV_NOPTS_VALUE)) { 31 | - struct AVPacketList* packet_buffer = internal->packet_buffer; 32 | - while (packet_buffer != internal->packet_buffer_end) { 33 | - DCHECK_LT(static_cast(packet_buffer->pkt.stream_index), 34 | - start_time_estimates.size()); 35 | - const AVStream* stream = 36 | - format_context->streams[packet_buffer->pkt.stream_index]; 37 | - if (packet_buffer->pkt.pts != static_cast(AV_NOPTS_VALUE)) { 38 | - const base::TimeDelta packet_pts = 39 | - ConvertFromTimeBase(stream->time_base, packet_buffer->pkt.pts); 40 | - if (packet_pts < start_time_estimates[stream->index]) 41 | - start_time_estimates[stream->index] = packet_pts; 42 | - } 43 | - packet_buffer = packet_buffer->next; 44 | - } 45 | - } 46 | 47 | std::unique_ptr media_tracks(new MediaTracks()); 48 | 49 | -------------------------------------------------------------------------------- /electron/0011-chromium-gtk3.patch: -------------------------------------------------------------------------------- 1 | --- a/chrome/browser/ui/libgtkui/gtk_ui.cc 2 | +++ b/chrome/browser/ui/libgtkui/gtk_ui.cc 3 | @@ -411,6 +411,10 @@ 4 | } // namespace 5 | 6 | Gtk2UI::Gtk2UI() : middle_click_action_(GetDefaultMiddleClickAction()) { 7 | +#if GTK_MAJOR_VERSION > 2 8 | + g_setenv("GDK_SCALE", "1", FALSE); 9 | + gdk_set_allowed_backends("x11"); 10 | +#endif 11 | GtkInitFromCommandLine(*base::CommandLine::ForCurrentProcess()); 12 | } 13 | 14 | -------------------------------------------------------------------------------- /electron/0012-brightray-use-system-ffmpeg.patch: -------------------------------------------------------------------------------- 1 | --- a/brightray.gyp 2 | +++ b/brightray.gyp 3 | @@ -130,8 +130,6 @@ 4 | }, { 5 | 'link_settings': { 6 | 'libraries': [ 7 | - # Link with ffmpeg. 8 | - '<(libchromiumcontent_dir)/libffmpeg.so', 9 | # Following libraries are required by libchromiumcontent: 10 | '-lasound', 11 | '-lcap', 12 | -------------------------------------------------------------------------------- /electron/0013-brightray-no-whole-archive.patch: -------------------------------------------------------------------------------- 1 | --- a/brightray.gyp 2 | +++ b/brightray.gyp 3 | @@ -41,14 +41,13 @@ 4 | 'conditions': [ 5 | # Link with libraries of libchromiumcontent. 6 | ['OS=="linux" and libchromiumcontent_component==0', { 7 | - # On Linux we have to use "--whole-archive" to force executable 8 | - # to include all symbols, otherwise we will have plenty of 9 | + # On Linux we have to use "--start-group" or we will have plenty of 10 | # unresolved symbols errors. 
11 | - 'direct_dependent_settings': { 12 | - 'ldflags': [ 13 | - '-Wl,--whole-archive', 14 | + 'link_settings': { 15 | + 'libraries': [ 16 | + '-Wl,--start-group', 17 | '<@(libchromiumcontent_libraries)', 18 | - '-Wl,--no-whole-archive', 19 | + '-Wl,--end-group' 20 | ], 21 | } 22 | }, { # (Release build on Linux) 23 | -------------------------------------------------------------------------------- /electron/0014-use-system-libraries-in-node.patch: -------------------------------------------------------------------------------- 1 | --- a/common.gypi 2 | +++ b/common.gypi 3 | @@ -22,12 +22,12 @@ 4 | 'node_install_npm': 'false', 5 | 'node_prefix': '', 6 | 'node_shared': 'true', 7 | - 'node_shared_cares': 'false', 8 | - 'node_shared_http_parser': 'false', 9 | + 'node_shared_cares': 'true', 10 | + 'node_shared_http_parser': 'true', 11 | 'node_shared_libuv': 'false', 12 | 'node_shared_openssl': 'false', 13 | 'node_shared_v8': 'true', 14 | - 'node_shared_zlib': 'false', 15 | + 'node_shared_zlib': 'true', 16 | 'node_tag': '', 17 | 'node_use_dtrace': 'false', 18 | 'node_use_etw': 'false', 19 | @@ -109,18 +109,6 @@ 20 | 'conditions': [ 21 | ['OS=="linux"', { 22 | 'cflags': [ 23 | - '-Wno-parentheses-equality', 24 | - '-Wno-unused-function', 25 | - '-Wno-sometimes-uninitialized', 26 | - '-Wno-pointer-sign', 27 | - '-Wno-string-plus-int', 28 | - '-Wno-unused-variable', 29 | - '-Wno-unused-value', 30 | - '-Wno-deprecated-declarations', 31 | - '-Wno-return-type', 32 | - '-Wno-shift-negative-value', 33 | - '-Wno-format', 34 | - '-Wno-varargs', # https://git.io/v6Olj 35 | # Required when building as shared library. 36 | '-fPIC', 37 | ], 38 | @@ -160,6 +148,12 @@ 39 | ], 40 | }], 41 | ['OS=="linux" and libchromiumcontent_component==0', { 42 | + 'libraries': [ 43 | + '-lcares', 44 | + '-lcrypto', 45 | + '-lhttp_parser', 46 | + '-lz', 47 | + ], 48 | # Prevent the linker from stripping symbols. 
49 | 'ldflags': [ 50 | '-Wl,--whole-archive', 51 | -------------------------------------------------------------------------------- /electron/additional_patches_list: -------------------------------------------------------------------------------- 1 | RECORDED=('1.6' '1.7') 2 | COMMON_PATCHES=('0001-v8-fix-build-with-gcc7.patch' 3 | '0002-WebKit-fix-build-with-gcc7-full.patch' 4 | '0004-fix-build-on-glibc-2.26.patch' 5 | '0005-no-bundled-sysroot.patch' 6 | '0007-libchromiumcontent-settings.patch' 7 | '0009-chromium-use-system-re2.patch' 8 | '0010-chromium-use-system-ffmpeg.patch') 9 | PATCHES_1_6=(${COMMON_PATCHES[@]} '0008-chromium-unset-madv_free.patch' 10 | '0011-chromium-gtk3.patch') 11 | PATCHES_1_7=(${COMMON_PATCHES[@]} '0006-gn-bootstrap-fix.patch') 12 | -------------------------------------------------------------------------------- /electron/collect-abflags.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TMP="$(mktemp -d)" 3 | mkdir "${TMP}/autobuild" 4 | cat << EOF > "${TMP}/autobuild/defines" 5 | PKGNAME=test 6 | PKGDES=test 7 | PKGVER=1 8 | PKGREL=0 9 | EOF 10 | 11 | FLAGS_SH=$(readlink -f "${TMP}/flags.sh") 12 | cat << EOS > "${TMP}/autobuild/build" 13 | echo "export CFLAGS='\${CFLAGS}'" > ${FLAGS_SH} 14 | echo "export CXXFLAGS='\${CXXFLAGS}'" >> ${FLAGS_SH} 15 | echo "export LDFLAGS='\${LDFLAGS}'" >> ${FLAGS_SH} 16 | exit 1 17 | EOS 18 | 19 | pushd "${TMP}" 20 | autobuild 21 | source "${FLAGS_SH}" 22 | popd 23 | rm -rf "${TMP}" 24 | -------------------------------------------------------------------------------- /find-deps/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /find-deps/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "find-deps" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [dependencies] 7 | anyhow = "1" 8 | oma-contents = "0.17" 9 | clap = { version = "4.5", features = ["derive"] } 10 | -------------------------------------------------------------------------------- /find-deps/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | io::BufRead, 4 | path::{Path, PathBuf}, 5 | process::{Command, exit}, 6 | }; 7 | 8 | use anyhow::{Context, Result}; 9 | use clap::Parser; 10 | use oma_contents::searcher::{self, Mode}; 11 | 12 | #[derive(Debug, Parser)] 13 | struct App { 14 | /// Display match package binary path 15 | #[arg(long)] 16 | print_paths: bool, 17 | /// Binary path 18 | path: PathBuf, 19 | /// Only display +32 Package(s) result 20 | #[arg(long)] 21 | optenv32: bool, 22 | /// Allow search non /usr/lib path 23 | #[arg(long)] 24 | all_prefix: bool, 25 | /// Print result to one line 26 | #[arg(long)] 27 | oneline: bool, 28 | } 29 | 30 | fn main() -> Result<()> { 31 | let App { 32 | print_paths, 33 | path, 34 | optenv32, 35 | all_prefix, 36 | oneline, 37 | } = App::parse(); 38 | 39 | let cmd = Command::new("readelf") 40 | .env("LANG", "C") 41 | .arg("-d") 42 | .arg(path) 43 | .output()?; 44 | 45 | if !cmd.status.success() { 46 | eprint!("{}", String::from_utf8_lossy(&cmd.stderr)); 47 | exit(cmd.status.code().unwrap_or(1)); 48 | } 49 | 50 | let output = cmd.stdout.lines(); 51 | 52 | let mut deps = vec![]; 53 | 54 | for o in output { 55 | let o = o?; 56 | if !o.contains("(NEEDED)") { 57 | continue; 58 | } 59 | let 
Some(lib) = o.split_ascii_whitespace().next_back() else { 60 | continue; 61 | }; 62 | 63 | deps.push( 64 | lib.strip_prefix('[') 65 | .and_then(|x| x.strip_suffix(']')) 66 | .context("Failed to parse readelf output")? 67 | .to_string(), 68 | ); 69 | } 70 | 71 | let mut map: HashMap<String, Vec<String>> = HashMap::new(); 72 | 73 | for dep in deps { 74 | searcher::search("/var/lib/apt/lists", Mode::Provides, &dep, |(pkg, path)| { 75 | if path.ends_with(&format!("/{}", dep)) { 76 | map.entry(pkg).or_default().push(path); 77 | } 78 | }) 79 | .ok(); 80 | } 81 | 82 | let mut result = map.into_iter().collect::<Vec<_>>(); 83 | result.sort_unstable_by(|a, b| a.0.cmp(&b.0)); 84 | 85 | result 86 | .iter() 87 | .filter(|x| !optenv32 || x.0.ends_with("+32")) 88 | .filter(|x| { 89 | (all_prefix || optenv32) 90 | || x.1.iter().map(|p| Path::new(p)).any(|x| { 91 | x.parent() 92 | .is_some_and(|x| x.to_string_lossy() == "/usr/lib") 93 | }) 94 | }) 95 | .for_each(|x| { 96 | if print_paths { 97 | println!("{} [{}]", x.0, x.1.join(",")) 98 | } else if oneline { 99 | print!("{} ", x.0); 100 | } else { 101 | println!("{}", x.0); 102 | } 103 | }); 104 | 105 | if oneline { 106 | println!(); 107 | } 108 | 109 | Ok(()) 110 | } 111 | -------------------------------------------------------------------------------- /findupd/README.md: -------------------------------------------------------------------------------- 1 | ## Update Outdated Packages (Quick-n-Easy) 2 | 3 | 1. Go to an ABBS tree 4 | 5 | ```$ cd /path/to/abbs-tree``` 6 | 7 | 2. Move all scripts to $PATH (e.g. /usr/local/bin) 8 | 9 | ``` 10 | $ mv findupd* /usr/local/bin 11 | $ mv *.py /usr/local/bin 12 | ``` 13 | 14 | 3. Check for updates 15 | 16 | ``` 17 | # For all updates 18 | $ findupd 19 | 20 | # For patch-level (a.k.a. stable-proposed) updates 21 | $ findupd-stable 22 | ``` 23 | 24 | ## Update Outdated Packages (Stick Shift Mastery) 25 | 26 | 1. Go to an ABBS tree 27 | 28 | ```$ cd /path/to/abbs-tree``` 29 | 30 | 2. Move all scripts to $PATH (e.g. /usr/local/bin) 31 | 32 | ``` 33 | $ mv findupd* /usr/local/bin 34 | $ mv *.py /usr/local/bin 35 | ``` 36 | 37 | 3. Dump Repology's outdated packages info to a local JSON file 38 | 39 | ```$ python3 update-pkgs.py -d ../repology.json``` 40 | 41 | 4. Search for updates (e.g. in extra-graphics only) 42 | 43 | ``` 44 | $ python3 update-pkgs.py -j ../repology.json -c extra-graphics -r 45 | ``` 46 | 47 | ### Save Update Data Source to File 48 | 49 | ``` 50 | $ python3 update-pkgs.py -j ../repology.json -s cache.txt 51 | ``` 52 | 53 | ### Load Saved Data Source (as cache) 54 | 55 | ``` 56 | $ python3 update-pkgs.py -l cache.txt -c extra-graphics -r -q 57 | ``` 58 | 59 | ### Use AOSC OS Packages Site's Data Source 60 | 61 | 1. $ wget "https://packages.aosc.io/srcupd/aosc-os-abbs?type=json&page=all" -O packages.json 62 | 63 | 2. $ python3 update-pkgs.py -j ../packages.json -c extra-graphics -r 64 | 65 | ## Rebuild Packages 66 | 67 | 1. Dump the packages list to rebuild 68 | 69 | ``` 70 | $ apt list $(apt-cache rdepends mlt | sort -u) > /path/to/mlt.txt 71 | ``` 72 | 73 | 2. Go to an ABBS tree 74 | 75 | ``` 76 | $ cd /path/to/abbs-tree 77 | ``` 78 | 79 | 3. 
Automatically bump REL in repo 80 | 81 | ``` 82 | $ python3 rebuild.py /path/to/mlt.txt 83 | ``` 84 | -------------------------------------------------------------------------------- /findupd/findupd: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -rf /tmp/repology.json 4 | update-pkgs.py -d /tmp/repology.json 5 | update-pkgs.py -j /tmp/repology.json -s /tmp/cache.txt -q 6 | 7 | for i in "$@"; do 8 | update-pkgs.py -l /tmp/cache.txt -c $i -r -q 9 | done 10 | 11 | rm -rf /tmp/cache.txt 12 | -------------------------------------------------------------------------------- /findupd/rebuild.py: -------------------------------------------------------------------------------- 1 | #!/bin/python3 2 | 3 | import sys 4 | import os 5 | import subprocess 6 | 7 | 8 | def read_file(src_path): 9 | with open(src_path, "r") as f: 10 | return f.readlines() 11 | 12 | 13 | def write_file(dest_path, contents): 14 | with open(dest_path, "w") as f: 15 | f.writelines(contents) 16 | 17 | 18 | def get_pkgs(filename): 19 | contents = read_file(filename) 20 | pkgs = [] 21 | for line in contents: 22 | if 'os-' in line: 23 | pkg = line.split('/')[0] 24 | pkgs.append(pkg) 25 | 26 | return pkgs 27 | 28 | 29 | def find_spec(pkgname): 30 | print("Bump %s ..." % pkgname) 31 | result = subprocess.run( 32 | ['find', '.', '-name', pkgname], stdout=subprocess.PIPE) 33 | filepaths = result.stdout.decode('utf-8').split('\n') 34 | for f in filepaths: 35 | specfile = os.path.join(f, 'spec') 36 | if os.path.isfile(specfile): 37 | return specfile 38 | return None 39 | 40 | 41 | def bump_rel(spec_path): 42 | orig_spec = read_file(spec_path) 43 | contents = [] 44 | has_rel = False 45 | for line in orig_spec: 46 | if 'REL=' in line: 47 | cur_rel = line.split('=')[-1].strip() 48 | new_rel = str(int(cur_rel) + 1) 49 | contents.append(line.replace(cur_rel, new_rel)) 50 | has_rel = True 51 | elif 'REL=' not in line: 52 | contents.append(line) 53 | 54 | if not has_rel: 55 | contents.insert(1, "REL=1\n") 56 | write_file(spec_path, contents) 57 | 58 | 59 | if __name__ == "__main__": 60 | pkgs = get_pkgs(sys.argv[1]) 61 | for pkg in pkgs: 62 | bump_rel(find_spec(pkg)) 63 | -------------------------------------------------------------------------------- /gen-binutils-cross.py: -------------------------------------------------------------------------------- 1 | for arch in ["amd64", "arm64", "loongarch64", "loongson3", "mips64r6el", "powerpc", "ppc64", "ppc64el", "riscv64"]: 2 | folder = f"binutils+cross-{arch}" 3 | with open(f"{folder}/spec", "w") as f: 4 | print("""VER=2.42 5 | SRCS="tbl::https://ftp.gnu.org/gnu/binutils/binutils-$VER.tar.xz" 6 | CHKSUMS="sha256::f6e4d41fd5fc778b06b7891457b3620da5ecea1006c6a4a41ae998109f85a800" 7 | CHKUPDATE="anitya::id=7981" 8 | """.strip(), file=f) 9 | with open(f"{folder}/autobuild/defines", "w") as f: 10 | print(f"""PKGNAME=binutils+cross-{arch} 11 | PKGDEP="glibc" 12 | BUILDDEP="flex xz elfutils" 13 | PKGSEC=devel 14 | PKGDES="Binutils for {arch} cross build" 15 | """.strip(), file=f) 16 | with open(f"{folder}/autobuild/beyond", "w") as f: 17 | print(f"""abinfo "Dropping texinfo dir ..." 
18 | rm -v "$PKGDIR"/opt/abcross/{arch}/share/info/dir 19 | """.strip(), file=f) 20 | with open(f"{folder}/autobuild/build", "w") as f: 21 | target = { 22 | "amd64": "x86_64-aosc-linux-gnu", 23 | "arm64": "aarch64-aosc-linux-gnu", 24 | "loongarch64": "loongarch64-aosc-linux-gnu", 25 | "loongson3": "mips64el-aosc-linux-gnuabi64", 26 | "mips64r6el": "mipsisa64r6el-aosc-linux-gnuabi64", 27 | "powerpc": "powerpc-aosc-linux-gnu", 28 | "ppc64": "powerpc64-aosc-linux-gnu", 29 | "ppc64el": "powerpc64le-aosc-linux-gnu", 30 | "riscv64": "riscv64-aosc-linux-gnu", 31 | }[arch] 32 | if arch == "amd64": 33 | flags = ["--enable-shared", "--disable-multilib", "--disable-werror"] 34 | elif arch == "arm64": 35 | flags = ["--enable-shared", "--disable-multilib", "--with-arch=armv8-a", "--disable-werror", "--enable-gold"] 36 | elif arch == "loongarch64": 37 | flags = ["--enable-shared", "--disable-multilib", "--with-arch=la464", "--disable-werror", "--disable-gold"] 38 | elif arch == "loongson3": 39 | flags = ["--enable-threads", "--enable-shared", "--with-pic", "--enable-ld", "--enable-plugins", "--disable-werror", "--enable-lto", "--disable-gdb", "--enable-deterministic-archives", "--enable-64-bit-bfd", "--enable-mips-fix-loongson3-llsc"] 40 | elif arch == "mips64r6el": 41 | flags = ["--enable-shared", "--disable-multilib", "--with-arch=mips64r6", "--with-tune=mips64r6", "--disable-werror", "--enable-gold"] 42 | elif arch == "powerpc": 43 | flags = ["--enable-threads", "--enable-shared", "--with-pic", "--enable-ld", "--enable-plugins", "--disable-werror", "--enable-lto", "--disable-gdb", "--enable-deterministic-archives", "--enable-64-bit-bfd"] 44 | elif arch == "ppc64": 45 | flags = ["--enable-threads", "--enable-shared", "--with-pic", "--enable-ld", "--enable-plugins", "--disable-werror", "--enable-lto", "--disable-gdb", "--enable-deterministic-archives", "--enable-64-bit-bfd"] 46 | elif arch == "ppc64el": 47 | flags = ["--enable-threads", "--enable-shared", "--with-pic", "--enable-ld", "--enable-plugins", "--disable-werror", "--enable-lto", "--disable-gdb", "--enable-deterministic-archives", "--enable-targets=powerpc-linux", "--enable-64-bit-bfd"] 48 | elif arch == "riscv64": 49 | flags = ["--enable-shared", "--disable-multilib", "--disable-werror", "--with-isa-spec=2.2"] 50 | extra_flags = " \\\n ".join(flags) 51 | print(f"""# Auto-generated by scriptlets/gen-binutils-cross.py 52 | abinfo "Clearing compiler flags in environment..." 53 | unset CFLAGS CXXFLAGS CPPFLAGS LDFLAGS 54 | 55 | abinfo "Configuring binutils..." 56 | mkdir -pv "$SRCDIR"/build 57 | cd "$SRCDIR"/build 58 | 59 | ../configure \\ 60 | --prefix=/opt/abcross/{arch} \\ 61 | --target={target} \\ 62 | --with-sysroot=/var/ab/cross-root/{arch} \\ 63 | {extra_flags} 64 | 65 | abinfo "Building binutils..." 66 | make configure-host 67 | make 68 | 69 | abinfo "Installing binutils to target directory..." 70 | make DESTDIR=$PKGDIR install 71 | """.strip(), file=f) 72 | -------------------------------------------------------------------------------- /genaffect/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 
9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /genaffect/README.md: -------------------------------------------------------------------------------- 1 | genaffect 2 | ========= 3 | 4 | Generates a list of affected packages for topic pull requests. 5 | 6 | ``` 7 | Usage: 8 | 9 | genaffect PACKAGE_GROUP 10 | 11 | - PACKAGE_GROUP: Path to the list of packages to generate affected list from. 12 | (Example: TREE/groups/plasma) 13 | ``` 14 | -------------------------------------------------------------------------------- /genaffect/genaffect: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | _help_message() { 4 | printf "\ 5 | Generates a list of affected packages for topic pull requests. 6 | 7 | Usage: 8 | 9 | genaffect PACKAGE_GROUP 10 | 11 | - PACKAGE_GROUP: Path to the list of packages to generate affected list from. 12 | (Example: TREE/groups/plasma) 13 | 14 | " 15 | } 16 | 17 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 18 | _help_message 19 | exit 0 20 | fi 21 | 22 | if [ -z "$1" ]; then 23 | echo -e "[!!!] Please specify a package group.\n" 24 | _help_message 25 | exit 1 26 | fi 27 | 28 | for i in `cat $1`; do 29 | source $i/spec 30 | printf -- "- \`$(echo $i | cut -f2 -d/)\` v${VER}-${REL:-0}\n" 31 | done | sort 32 | -------------------------------------------------------------------------------- /generate-test-queue/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /generate-test-queue/README.md: -------------------------------------------------------------------------------- 1 | generate-test-queue 2 | ------- 3 | 4 | Generates a weekly (from time of running) update report for stable-proposed. 5 | 6 | ``` 7 | Usage: 8 | 9 | generate-test-queue.sh LDAP_USERNAME 10 | ``` 11 | 12 | **Mock implementation, improvements pending...** 13 | 14 | - Add output for version delta. 15 | - Re-implement in some more efficient language. 16 | -------------------------------------------------------------------------------- /generate-test-queue/generate-test-queue: -------------------------------------------------------------------------------- 1 | _help_message() { 2 | printf "\ 3 | Usage: 4 | 5 | $0 LDAP_USERNAME 6 | 7 | " 8 | } 9 | 10 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 11 | echo -e "Generates a weekly update report for stable-proposed.\n" 12 | _help_message 13 | exit 0 14 | fi 15 | 16 | if [ -z "$1" ]; then 17 | echo -e "[!!!] Please specify a LDAP user!\n" 18 | _help_message 19 | exit 1 20 | fi 21 | 22 | ssh $1@repo.aosc.io "cd /mirror/debs/pool/stable-proposed; find . 
-name '*.deb'" > list-stable-proposed 23 | 24 | perl -ne 'my $pattern = qr/\/(.*?)\/.*?\/(.*?)_(.*?)(?>-0)?_(.*?)\.deb/mp;my @matches = =~ /$pattern/;print "- [ ] @matches[0](@matches[3]): @matches[1] @matches[2]\n";' < list-stable-proposed | head -n-1 | sort 25 | -------------------------------------------------------------------------------- /grow-rootfs: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | set -e 4 | 5 | # Grow the last partition to the last of the disk. 6 | 7 | ROOTDEV= 8 | ROOTPART= 9 | ROOTTYPE= 10 | PARTNUM= 11 | 12 | echo "[+] Preparing to grow..." 13 | 14 | if [ "$(mount | grep 'on / type' | grep nfs)" ]; then 15 | echo "[-] No joking around, please. You are using a NFS." 16 | exit 1 17 | fi 18 | 19 | if [ "$1" == "yes-please" ]; then 20 | echo "[+] You said yes, so we just go on." 21 | elif [ -t 0 -a -t 1 ]; then 22 | read -p "Are you sure (y/N)? " ANS 23 | if ! echo "$ANS" | grep -q '[yY]' ; then 24 | echo "[+] Aborting." 25 | exit 0 26 | fi 27 | echo "[+] Okay, you choose to take the risk." 28 | else 29 | echo "[+] You are running this with scripts. You have taken the risk." 30 | fi 31 | 32 | if [ -e "/.resize-partition" ]; then 33 | # Resize partition 34 | source "/.resize-partition" 35 | case "$ROOTTYPE" in 36 | "ext4") 37 | resize2fs /dev/$ROOTPART 38 | ;; 39 | "btrfs") 40 | btrfs filesystem resize max / 41 | ;; 42 | *) 43 | ;; 44 | esac 45 | rm /.resize-partition 46 | exit 0 47 | else 48 | ROOTPART=$(lsblk -lno NAME,MOUNTPOINT | sed -E 's/\s+/,/g' | grep ',/$' | cut -d',' -f1) 49 | ROOTDEV="/dev/$(lsblk -lno PKNAME /dev/$ROOTPART)" 50 | ROOTTYPE=$(lsblk -lno FSTYPE /dev/$ROOTPART) 51 | # Get current partition number 52 | # PARTNUM=$(lsblk -lno "MAJ:MIN" /dev/$ROOTPART | cut -d':' -f2) - It is not reliable 53 | # Use the old school method 54 | PARTNUMS=($(echo "$ROOTPART" | grep -oE '[0-9]+')) 55 | PARTNUM=${PARTNUMS[-1]} 56 | echo "[+] Root partition: $ROOTPART" 57 | echo "[+] Device the root partition is in: $ROOTDEV" 58 | echo "[+] Root filesystem: $ROOTTYPE" 59 | echo "[+] Number of the root partition: $PARTNUM" 60 | if [ "$DEBUG" ]; then 61 | echo "[D] That's all we need to know." 62 | exit 0 63 | fi 64 | if [ "$ROOTDEV" -a "$ROOTPART" -a "$ROOTTYPE" -a "$PARTNUM" ]; then 65 | # Get partitions 66 | PARTS=($(lsblk -o NAME -ln $ROOTDEV)) 67 | echo "[+] Last partition of this disk: ${PARTS[-1]}" 68 | if [ "$ROOTPART" != "${PARTS[-1]}" ]; then 69 | echo "[-] Jesus, don't even think about it; Your partition is not the last partition!" 70 | exit 1 71 | fi 72 | echo "[+] Okay, your root partition is the last partition. Proceeding." 73 | # Resize partition 74 | # Call sfdisk to grow this partition, gently 75 | # It will not touch anything else. 76 | echo ', +' | sfdisk --force -N $PARTNUM $ROOTDEV 77 | # Okay, we are ready to grow 78 | touch "/.resize-partition" 79 | echo "ROOTDEV=$ROOTDEV" >> /.resize-partition 80 | echo "ROOTPART=$ROOTPART" >> /.resize-partition 81 | echo "ROOTTYPE=$ROOTTYPE" >> /.resize-partition 82 | echo "PARTNUM=$PARTNUM" >> /.resize-partition 83 | fi 84 | fi 85 | 86 | -------------------------------------------------------------------------------- /kernel-template-postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Generate initrd using Dracut. 
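# For a hypothetical kernel version 6.1.0-aosc-main, the loop below runs:
#   dracut -q --force /boot/initramfs-6.1.0-aosc-main.img 6.1.0-aosc-main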
3 | if [ -x /usr/bin/dracut ]; then 4 | for i in /usr/lib/modules/*; do 5 | case $i in (*extramodules*) continue;; esac 6 | echo -e "\033[36m**\033[0m\tGenerating initrd (Initialization RAM Disk) for kernel version ${i##*/} ..." 7 | dracut -q --force /boot/initramfs-"${i##*/}".img "${i##*/}" 8 | done 9 | else 10 | echo -e "\033[33m**\033[0m\tCommand \"dracut\" is not installed, skipping generation.\n\tYou may not be able to boot the new kernel on the next boot." 11 | fi 12 | 13 | # TODO: Support for other initramfs/initrd managers. 14 | # TODO: Write in a signal for PS1 to show that a reboot is required. 15 | -------------------------------------------------------------------------------- /leaves.sql: -------------------------------------------------------------------------------- 1 | -- Run it on the repo server 2 | -- Requires R/O permission on the meta database of the packages-site 3 | SELECT DISTINCT name 4 | FROM v_packages 5 | WHERE branch = 'stable' 6 | AND name NOT IN ( 7 | SELECT DISTINCT dependency 8 | FROM package_dependencies 9 | WHERE relationship IN ('PKGDEP', 'BUILDDEP', 'PKGRECOM', 'PKGSUG') 10 | ) 11 | ORDER BY name; 12 | -------------------------------------------------------------------------------- /list-affected/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar <sam@hocevar.net> 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /list-affected/README.md: -------------------------------------------------------------------------------- 1 | list-affected 2 | ============= 3 | 4 | A simple script to generate a list of affected packages involved in an 5 | upcoming topic pull request. 6 | 7 | Usage 8 | ----- 9 | 10 | ```bash 11 | ./list-affected foo bar baz 12 | ``` 13 | -------------------------------------------------------------------------------- /list-affected/list-affected: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in "$@"; do 4 | if [ -e */${i##*/}/spec ]; then 5 | unset VER REL 6 | source */${i##*/}/spec 7 | printf -- "- [ ] \`${i##*/}\` v$VER-$REL\n" 8 | else 9 | echo "Warning: package ${i##*/} was not found in the source tree." 10 | fi 11 | done | \ 12 | sed -e 's/-$//' | sort 13 | -------------------------------------------------------------------------------- /loong64-it/README.md: -------------------------------------------------------------------------------- 1 | loong64-it 2 | === 3 | 4 | Quickly converts "old-world" `loongarch64` .deb packages to "new-world" 5 | `loong64` ones - this is useful for distributions such as Debian and deepin, 6 | which insist that architecture names differ between the two worlds. 7 | 8 | This aids users of libLoL-enabled `loong64` distributions in installing 9 | and using old-world applications such as Tencent QQ and WPS for Linux. 10 | 11 | Usage 12 | --- 13 | 14 | ``` 15 | loong64-it [PACKAGE1] [PACKAGE2] ... 16 | 17 | - PACKAGE{1..N}: Path to the old-world .deb package to convert. 
18 | ``` 19 | 20 | Implementation 21 | --- 22 | 23 | The script does the following: 24 | 25 | - Verifies that the specified package file(s) are valid .deb package(s). 26 | - Using `ar`, extracts `control.tar*` for processing. 27 | - Records the suffix and compression method of the control archive 28 | so that it can be replaced in place in the original .deb. 29 | - Examines and processes `control`, changing the `Architecture:` field from 30 | `loongarch64` to `loong64`, where applicable (an error is returned if said 31 | package carries a `loong64`, or anything other than the old/new- 32 | world pair, architecture key). 33 | - Repacks the `control.tar*` archive and replaces it in place. 34 | - Informs the user that the `.deb` file has been successfully processed 35 | and is ready to use (and recommends installing libLoL). 36 | -------------------------------------------------------------------------------- /loong64-it/loong64-it.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # A simple loongarch64 => loong64 .deb converter to help traverse the worlds. 3 | # 4 | # Mingcong Bai, 2024 5 | 6 | _display_usage() { 7 | printf "\ 8 | Usage: 9 | 10 | loong64-it [PACKAGE1] [PACKAGE2] ... 11 | 12 | - PACKAGE{1..N}: Path to the old-world .deb package to convert. 13 | 14 | " 15 | } 16 | 17 | # Autobuild-like echo functions. 18 | abwarn() { echo -e "[\e[33mWARN\e[0m]: \e[1m$*\e[0m"; } 19 | aberr() { echo -e "[\e[31mERROR\e[0m]: \e[1m$*\e[0m"; exit 1; } 20 | abinfo() { echo -e "[\e[96mINFO\e[0m]: \e[1m$*\e[0m"; } 21 | abdbg() { echo -e "[\e[32mDEBUG\e[0m]: \e[1m$*\e[0m"; } 22 | 23 | _convert_loong64() { 24 | abinfo "Examining package information: $1 ..." 25 | dpkg -I "$SRCDIR/$1" || \ 26 | aberr "Invalid dpkg package: control (metadata) archive not found: $?" 27 | CONTROL_EXT="$(ar t "$SRCDIR/$1" | grep -F control.tar | cut -f3 -d'.')" 28 | case "${CONTROL_EXT}" in 29 | gz) 30 | TAR_COMP_FLAG="z" 31 | ;; 32 | xz) 33 | TAR_COMP_FLAG="J" 34 | ;; 35 | bz2) 36 | TAR_COMP_FLAG="j" 37 | ;; 38 | "") 39 | TAR_COMP_FLAG="" 40 | ;; 41 | *) 42 | aberr "Invalid control archive extension ${CONTROL_EXT}!" 43 | ;; 44 | esac 45 | 46 | abinfo "Unpacking: $1 ..." 47 | cd "$(mktemp -d)" || \ 48 | aberr "Failed to create temporary directory to unpack $1: $?." 49 | DEBDIR="$(pwd)" 50 | ar xv "$SRCDIR/$1" || \ 51 | aberr "Failed to unpack $1: $?." 52 | 53 | abinfo "Unpacking metadata archive: $1 ..." 54 | mkdir "$DEBDIR"/metadata || \ 55 | aberr "Failed to create temporary directory for extracting the metadata archive from $1: $?." 56 | tar -C "$DEBDIR"/metadata -xvf control.tar."${CONTROL_EXT}" || \ 57 | aberr "Failed to unpack metadata archive from $1: $?." 58 | 59 | abinfo "Converting dpkg Architecture key: $1 ..." 60 | if ! grep -q '^Architecture: loongarch64$' "$DEBDIR"/metadata/control; then 61 | aberr "Failed to detect a \"loongarch64\" architecture signature in the control file - this is not a valid old-world LoongArch package!" 62 | fi 63 | sed -e 's|^Architecture: loongarch64$|Architecture: loong64|g' \ 64 | -i "$DEBDIR"/metadata/control 65 | 66 | abinfo "Building metadata archive (control.tar.${CONTROL_EXT}): $1 ..." 67 | cd "$DEBDIR"/metadata 68 | tar cvf${TAR_COMP_FLAG} "$DEBDIR"/control.tar."${CONTROL_EXT}" * || \ 69 | aberr "Failed to build metadata archive (control.tar.${CONTROL_EXT}) for $1: $?." 70 | cd "$DEBDIR" 71 | 72 | abinfo "Rebuilding dpkg package $1: loong64 ..." 
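	# `ar r' inserts the listed member, replacing any existing member of
	# the same name in place; the data.tar.* payload is left untouched, so
	# only the control metadata differs from the original package.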
73 | ar rv "$SRCDIR/$1" control.tar.${CONTROL_EXT} || \ 74 | aberr "Failed to rebuild dpkg package $1: $?." 75 | 76 | #abinfo "Cleaning up: $1 ..." 77 | #rm -r "$DEBDIR" 78 | 79 | abinfo """Your requested package: 80 | 81 | $1 82 | 83 | Has been successfully converted into a loong64 package! 84 | 85 | However, you may still need to install libLoL for old-world applications to 86 | work properly. Please refer to the libLoL home page: 87 | 88 | https://liblol.aosc.io 89 | 90 | for details on how to install and configure libLoL. 91 | """ 92 | } 93 | 94 | # Display usage info if `-h' or `--help' is specified. 95 | if [[ "$1" == "-h" || "$1" == "--help" ]]; then 96 | _display_usage 97 | exit 0 98 | fi 99 | 100 | # Display usage info with directions if no option is specified. 101 | if [ -z "$1" ]; then 102 | abwarn "Please specify package(s) to convert.\n" 103 | _display_usage 104 | exit 1 105 | fi 106 | 107 | # Record working directory. 108 | SRCDIR="$(pwd)" 109 | 110 | # Rebuilding all requested packages. 111 | for i in "$@"; do 112 | _convert_loong64 "$i" 113 | done 114 | -------------------------------------------------------------------------------- /loong64-it/loongarch64-it.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # A simple loong64 => loongarch64 .deb converter to help traverse the worlds. 3 | # 4 | # Mingcong Bai, 2024 5 | 6 | _display_usage() { 7 | printf "\ 8 | Usage: 9 | 10 | loongarch64-it [PACKAGE1] [PACKAGE2] ... 11 | 12 | - PACKAGE{1..N}: Path to the new-world .deb package to convert. 13 | 14 | " 15 | } 16 | 17 | # Autobuild-like echo functions. 18 | abwarn() { echo -e "[\e[33mWARN\e[0m]: \e[1m$*\e[0m"; } 19 | aberr() { echo -e "[\e[31mERROR\e[0m]: \e[1m$*\e[0m"; exit 1; } 20 | abinfo() { echo -e "[\e[96mINFO\e[0m]: \e[1m$*\e[0m"; } 21 | abdbg() { echo -e "[\e[32mDEBUG\e[0m]: \e[1m$*\e[0m"; } 22 | 23 | _convert_loong64() { 24 | abinfo "Examining package information: $1 ..." 25 | dpkg -I "$SRCDIR/$1" || \ 26 | aberr "Invalid dpkg package: control (metadata) archive not found: $?" 27 | CONTROL_EXT="$(ar t "$SRCDIR/$1" | grep -F control.tar | cut -f3 -d'.')" 28 | case "${CONTROL_EXT}" in 29 | gz) 30 | TAR_COMP_FLAG="z" 31 | ;; 32 | xz) 33 | TAR_COMP_FLAG="J" 34 | ;; 35 | bz2) 36 | TAR_COMP_FLAG="j" 37 | ;; 38 | "") 39 | TAR_COMP_FLAG="" 40 | ;; 41 | *) 42 | aberr "Invalid control archive extension ${CONTROL_EXT}!" 43 | ;; 44 | esac 45 | 46 | abinfo "Unpacking: $1 ..." 47 | cd "$(mktemp -d)" || \ 48 | aberr "Failed to create temporary directory to unpack $1: $?." 49 | DEBDIR="$(pwd)" 50 | ar xv "$SRCDIR/$1" || \ 51 | aberr "Failed to unpack $1: $?." 52 | 53 | abinfo "Unpacking metadata archive: $1 ..." 54 | mkdir "$DEBDIR"/metadata || \ 55 | aberr "Failed to create temporary directory for extracting the metadata archive from $1: $?." 56 | tar -C "$DEBDIR"/metadata -xvf control.tar."${CONTROL_EXT}" || \ 57 | aberr "Failed to unpack metadata archive from $1: $?." 58 | 59 | abinfo "Converting dpkg Architecture key: $1 ..." 60 | if ! grep -q '^Architecture: loong64$' "$DEBDIR"/metadata/control; then 61 | aberr "Failed to detect a \"loong64\" architecture signature in the control file - this is not a valid new-world LoongArch package!" 62 | fi 63 | sed -e 's|^Architecture: loong64$|Architecture: loongarch64|g' \ 64 | -i "$DEBDIR"/metadata/control 65 | 66 | abinfo "Building metadata archive (control.tar.${CONTROL_EXT}): $1 ..." 
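	# Repack from inside metadata/ so that member paths in the new
	# control.tar.* stay top-level (control, md5sums, ...) rather than
	# nested under a directory.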
67 | cd "$DEBDIR"/metadata 68 | tar cvf${TAR_COMP_FLAG} "$DEBDIR"/control.tar."${CONTROL_EXT}" * || \ 69 | aberr "Failed to build metadata archive (control.tar.${CONTROL_EXT}) for $1: $?." 70 | cd "$DEBDIR" 71 | 72 | abinfo "Rebuilding dpkg package $1: loongarch64 ..." 73 | ar rv "$SRCDIR"/$1 control.tar.${CONTROL_EXT} || \ 74 | aberr "Failed to rebuild dpkg package $1: $?." 75 | 76 | #abinfo "Cleaning up: $1 ..." 77 | #rm -r "$DEBDIR" 78 | 79 | abinfo """Your requested package: 80 | 81 | $1 82 | 83 | Has been successfully converted as a loongarch64 package! 84 | """ 85 | } 86 | 87 | # Display usage info if `-h' or `--help' is specified. 88 | if [[ "$1" == "-h" || "$1" == "--help" ]]; then 89 | _display_usage 90 | exit 0 91 | fi 92 | 93 | # Display usage info with directions if no option is specified. 94 | if [ -z "$1" ]; then 95 | abwarn "Please specify package(s) to convert.\n" 96 | _display_usage 97 | exit 1 98 | fi 99 | 100 | # Record working directory. 101 | SRCDIR="$(pwd)" 102 | 103 | # Rebuilding all requested packages. 104 | for i in "$@"; do 105 | _convert_loong64 $i 106 | done 107 | -------------------------------------------------------------------------------- /make-jdk-tarball-new.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | REPO_SLUG='aarch64-port/jdk8u-shenandoah' 3 | REPO='jdk' 4 | JDK_VER='8u262-b09' 5 | TAG_NAME="aarch64-shenandoah-jdk${JDK_VER}-shenandoah-merge-2020-07-03" 6 | 7 | # 1: REPO_SLUG 2: i (component) 3: TAG_NAME 4: JDK_VER 5: extra wget args 8 | function download_single_comp() { 9 | echo "${2}: Downloading tarball ..." 10 | rm -f "${2}-jdk${4}.tar.gz" 11 | wget "$5" "http://hg.openjdk.java.net/${1}/${2}/archive/${3}.tar.gz" \ 12 | -O "${2}-jdk${4}.tar.gz" 13 | echo "${2}: Download completed." 14 | } 15 | 16 | function download_jdk_src() { 17 | wget http://hg.openjdk.java.net/${REPO_SLUG}/archive/${TAG_NAME}.tar.gz \ 18 | -O jdk8u-jdk${JDK_VER}.tar.gz 19 | 20 | if ! command -v parallel; then 21 | echo "[!] Not using parallel to download jdk." 22 | for i in corba hotspot jdk jaxws jaxp langtools nashorn; do 23 | download_single_comp "${REPO_SLUG}" "${i}" "${TAG_NAME}" "${JDK_VER}" "--" 24 | done 25 | else 26 | echo "[+] Using parallel to download jdk." 27 | export -f download_single_comp 28 | parallel --lb download_single_comp "${REPO_SLUG}" ::: corba hotspot jdk jaxws jaxp langtools nashorn \ 29 | ::: "${TAG_NAME}" ::: "${JDK_VER}" ::: "-q" 30 | fi 31 | 32 | for i in *.tar.gz; do 33 | echo "Decompressing ${i}..." 34 | tar xf "$i" 35 | done 36 | 37 | mv "$(basename "${REPO_SLUG}")-${TAG_NAME}" openjdk-${JDK_VER}/ 38 | cd openjdk-${JDK_VER}/ || exit 2 39 | for i in corba hotspot jdk jaxws jaxp langtools nashorn; do 40 | mv ../"${i}-${TAG_NAME}" ${i} 41 | done 42 | cd .. || exit 2 43 | } 44 | 45 | download_jdk_src 46 | 47 | if ! which pixz > /dev/null 2>&1; then 48 | echo "Compressing final tarball..." 49 | tar cf - openjdk-${JDK_VER}/ | xz -T0 > openjdk-${JDK_VER/-b/b}.tar.xz 50 | else 51 | echo "Compressing final tarball using pixz..." 
52 | tar -Ipixz -cf openjdk-${JDK_VER/-b/b}.tar.xz openjdk-${JDK_VER}/ 53 | fi 54 | -------------------------------------------------------------------------------- /make-rebuilds-list/.gitignore: -------------------------------------------------------------------------------- 1 | /.vscode -------------------------------------------------------------------------------- /make-rebuilds-list/README.md: -------------------------------------------------------------------------------- 1 | # make-rebuilds-list 2 | Makes a package rebuild list. 3 | 4 | ## Usage 5 | 6 | 1. Install requests: 7 | 8 | ``` 9 | pip3 install --user requests 10 | ``` 11 | 12 | 2. Copy make-rebuilds-list and run: 13 | 14 | ``` 15 | cp /path/to/make-rebuilds-list /path/to/abbs-tree 16 | cd /path/to/abbs-tree 17 | make-rebuilds-list PACKAGE_NAME 18 | ``` -------------------------------------------------------------------------------- /make-rebuilds-list/make-rebuilds-list: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import requests 5 | import os 6 | 7 | 8 | def get_rebuilds_list(package_name: str) -> list: 9 | response = requests.get( 10 | "https://packages.aosc.io/revdep/{}?type=json".format(package_name)) 11 | response.raise_for_status() 12 | package_info = response.json() 13 | # De-duplicate while preserving order. 14 | rebuilds_list = list(dict.fromkeys( 15 | package for group in package_info["sobreaks"] for package in group)) 16 | if package_info["sobreaks_circular"] is not None: 17 | rebuilds_list += package_info["sobreaks_circular"] 18 | if len(rebuilds_list) == 0: 19 | print("Package {} has nothing to rebuild!".format(package_name)) 20 | exit(0) 21 | 22 | # remove +32 packages 23 | rebuilds_list = [i for i in rebuilds_list if not i.endswith("+32")] 24 | 25 | # remove transitional packages/latx 26 | transitional_pkgs = ["gst-plugins-good-1-0", "latx"] 27 | rebuilds_list = [i for i in rebuilds_list if i not in transitional_pkgs] 28 | 29 | print("Packages to rebuild:", ", ".join(rebuilds_list)) 30 | return rebuilds_list 31 | 32 | 33 | def gen_rebuilds_list_string(rebuilds_list: list) -> str: 34 | rebuilds_path_list = [search_package_path(i) for i in rebuilds_list] 35 | rebuilds_path_list = [ 36 | i for i in rebuilds_path_list if i is not None and len(i.split('/')) == 2] 37 | return "\n".join(sorted(rebuilds_path_list)) + "\n" 38 | 39 | 40 | def search_package_path(package_name: str) -> str: 41 | with os.scandir(".") as dir1: 42 | for section in dir1: 43 | if section.is_dir() and not section.name.startswith('.'): 44 | with os.scandir(section) as dir2: 45 | for package in dir2: 46 | if package.is_dir() and os.path.isdir(os.path.join(package, "autobuild")): 47 | if package.name == package_name: 48 | return package.path[2:] 49 | else: 50 | continue 51 | 52 | # search subpackage, like arch-install-scripts/01-genfstab 53 | path = package 54 | if os.path.isdir(path) and section.name != "groups": 55 | with os.scandir(path) as dir3: 56 | for subpackage in dir3: 57 | if subpackage.name != "autobuild" and subpackage.is_dir(): 58 | try: 59 | with open(os.path.join(subpackage, "defines"), "r") as f: 60 | defines = f.readlines() 61 | except OSError: 62 | with open(os.path.join(subpackage, "autobuild/defines"), "r") as f: 63 | defines = f.readlines() 64 | finally: 65 | for line in defines: 66 | if "PKGNAME=" in line and ("{}\n".format(package_name) == line[8:] or "\"{}\"\n".format(package_name) == line[8:]): 67 | return package.path[2:] 68 | 69 | 70 | def 
write_string_to_file(package_name: str, rebuilds_path_list_str: str) -> None: 71 | with open('groups/{}-rebuilds'.format(package_name), "w") as f: 72 | f.write(rebuilds_path_list_str) 73 | print('groups/{}-rebuilds created!'.format(package_name)) 74 | 75 | 76 | def main(): 77 | if len(sys.argv) != 2: 78 | print("Usage: make-rebuilds-list PACKAGE_NAME") 79 | exit(1) 80 | package_name = sys.argv[1] 81 | request_list = get_rebuilds_list(package_name) 82 | rebuilds_path_list_str = gen_rebuilds_list_string(request_list) 83 | write_string_to_file(package_name, rebuilds_path_list_str) 84 | 85 | 86 | if __name__ == '__main__': 87 | main() 88 | -------------------------------------------------------------------------------- /make-video-samples/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /make-video-samples/README.md: -------------------------------------------------------------------------------- 1 | make-video-samples.bash 2 | === 3 | 4 | Converts sample video files for AOSC OS hardware validation. 5 | 6 | Usage 7 | --- 8 | 9 | ``` 10 | $0 [SAMPLE_FILE] 11 | 12 | - [SAMPLE_FILE]: Sample video file to convert. 13 | -h, --help: Displays this help message. 14 | ``` 15 | -------------------------------------------------------------------------------- /make-video-samples/make-video-samples.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Makes sample videos from an original source. 3 | # 4 | # Part of the AOSC OS hardware validation project (name pending). 5 | 6 | _help_message() { 7 | printf "\ 8 | $0: Converts sample video files for AOSC OS hardware validation. 9 | 10 | Usage: $0 [SAMPLE_FILE] 11 | 12 | - [SAMPLE_FILE]: Sample video file to convert. 13 | -h, --help: Displays this help message. 14 | 15 | " 16 | } 17 | 18 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 19 | _help_message 20 | exit 0 21 | fi 22 | 23 | if [ -z "$1" ]; then 24 | echo -e "[!!!] Please specify a sample video file to convert.\n" 25 | _help_message 26 | exit 1 27 | fi 28 | 29 | _convert() { 30 | # Standard parameters: 31 | # $1: Input filename 32 | # $2: Video encoding 33 | # $3: Output resolution (x:y) 34 | # $4: Output frames-per-second 35 | # $5: Output filename. 36 | # 37 | # Extra options: 38 | # -an: Remove audio. 39 | # -y: Non-interactive mode for scripting. 40 | ffmpeg \ 41 | -y \ 42 | -i "$1" \ 43 | -c:v "$2" \ 44 | -filter:v "scale=${3},fps=${4}" \ 45 | -an \ 46 | "$5" 47 | } 48 | 49 | # Note: FFmpeg does not support wmv3 encoding whilst wmv2 is still valid as 50 | # an encoder to produce VC-1 files for testing. 
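# A worked example of the loop below: each (encoder, resolution, framerate)
# combination yields one output file named after it, e.g. sample-av1-4k60.mp4,
# sample-vp9-1080p30.webm or sample-hevc-4k30.mp4 (names illustrative; the
# input file name does not affect them).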
51 | for _encoding in libsvtav1 libvpx-vp8 libvpx-vp9 libx264 libx265 mpeg2video wmv2; do 52 | case "$_encoding" in 53 | libsvtav1) 54 | _encoder_name="av1" 55 | _output_suffix="mp4" 56 | ;; 57 | libvpx-vp8) 58 | _encoder_name="vp8" 59 | _output_suffix="webm" 60 | ;; 61 | libvpx-vp9) 62 | _encoder_name="vp9" 63 | _output_suffix="webm" 64 | ;; 65 | libx264) 66 | _encoder_name="avc" 67 | _output_suffix="mp4" 68 | ;; 69 | libx265) 70 | _encoder_name="hevc" 71 | _output_suffix="mp4" 72 | ;; 73 | mpeg2video) 74 | _encoder_name="mpeg2" 75 | _output_suffix="mpg" 76 | ;; 77 | wmv2) 78 | _encoder_name="vc1" 79 | _output_suffix="wmv" 80 | ;; 81 | esac 82 | for _resolution in 3840x2160 1920x1080; do 83 | case "$_resolution" in 84 | 3840x2160) 85 | _resolution_name="4k" 86 | ;; 87 | 1920x1080) 88 | _resolution_name="1080p" 89 | ;; 90 | esac 91 | for _framerate in 60 30; do 92 | _convert \ 93 | "$1" "$_encoding" "${_resolution/x/:}" "$_framerate" \ 94 | sample-"${_encoder_name}"-"${_resolution_name}""${_framerate}"."$_output_suffix" 95 | done 96 | done 97 | done 98 | -------------------------------------------------------------------------------- /mkchkupdate: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import requests 4 | import os 5 | import argparse 6 | 7 | 8 | def main(): 9 | parser = argparse.ArgumentParser( 10 | description="mkchkupdate") 11 | parser.add_argument("-t", "--total", type=int, 12 | default=5, help="maximum number of Anitya matches to show") 13 | parser.add_argument('packages', metavar='N', type=str, nargs='+', 14 | help='packages') 15 | args = parser.parse_args() 16 | packages = args.packages 17 | total = args.total 18 | for i in packages: 19 | print("{}:".format(i)) 20 | r = requests.get( 21 | "https://release-monitoring.org/api/projects/?pattern={}".format(i)) 22 | d = r.json() 23 | print("Anitya:") 24 | index = 1 25 | if d["total"] != 0: 26 | for j in d["projects"]: 27 | if index <= total: 28 | print("Name: {}, Latest Version: {}, CHKUPDATE: anitya::id={}, Homepage: {}".format( 29 | j["name"], j["stable_versions"][0] if len(j["stable_versions"]) != 0 else "None", j["id"], j["homepage"] if j["homepage"] else "None")) 30 | index += 1 31 | else: 32 | break 33 | srcs = [] 34 | print("GitHub:") 35 | package_path = search_package_path(i) 36 | if package_path: 37 | with open("{}/spec".format(package_path)) as f: 38 | spec = f.readlines() 39 | for j in spec: 40 | if "SRCS=" in j: 41 | if len(j.split("::")) > 1: 42 | srcs += j.split("::")[1][:-1].split('\n') 43 | else: 44 | srcs += j[:-1].split('\n') 45 | for k in srcs: 46 | if "github" in k: 47 | split_k = k.split("/") 48 | print( 49 | "CHKUPDATE: github::repo={}/{}".format(split_k[3], split_k[4])) 50 | 51 | 52 | def search_package_path(package_name: str) -> str: 53 | with os.scandir(".") as dir1: 54 | for section in dir1: 55 | if section.is_dir() and not section.name.startswith('.'): 56 | with os.scandir(section) as dir2: 57 | for package in dir2: 58 | if package.name == package_name and package.is_dir() and os.path.isdir( 59 | os.path.join(package, "autobuild")): 60 | return package.path[2:] 61 | # search subpackage, like arch-install-scripts/01-genfstab 62 | path = package 63 | if os.path.isdir(path) and section.name != "groups": 64 | with os.scandir(path) as dir3: 65 | for subpackage in dir3: 66 | if subpackage.name != "autobuild" and subpackage.is_dir(): 67 | try: 68 | with open(os.path.join(subpackage, "defines"), "r") as f: 69 | defines = f.readlines() 70 | except OSError: 71 | with 
open(os.path.join(subpackage, "autobuild/defines"), "r") as f: 72 | defines = f.readlines() 73 | finally: 74 | for line in defines: 75 | if "PKGNAME=" in line and package_name in line: 76 | return package.path[2:] 77 | 78 | 79 | if __name__ == "__main__": 80 | main() 81 | -------------------------------------------------------------------------------- /mkfile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # (c) Mingye Wang 3 | # 2016-02-25 @ https://github.com/AOSC-Dev/scriptlets/blob/master/mkfile.sh 4 | # (c) Steve Parker, http://steve-parker.org/ 5 | # 2010-03-19 @ http://steve-parker.org/code/sh/mkfile.sh.txt 6 | # Licensed under the GPL Version 2 7 | 8 | # wrapper for dd and fallocate to act like Solaris' mkfile utility. 9 | 10 | die(){ printf 'mkfile: error: %s\n' "$1"; exit "${2-1}"; } 11 | 12 | usage() 13 | { 14 | echo "\ 15 | Usage: mkfile [ -qnvp ] [-m MODE] [ -i INFILE ] [ -b BS ] size[bkmgtpe] FILES... 16 | mkfile -? 17 | 18 | blocksize is 1048576 bytes by default. By default, the remainder from size%bs 19 | is added with an additional dd pass in copy mode. 20 | 21 | The \`b' suffix denotes block numbers; for compatibility, the default blocksize 22 | used for this calculation is 512." 23 | } 24 | 25 | usage_more(){ 26 | usage 27 | echo "Options: 28 | -q Turn off verbose output. This is the default. 29 | -v Turn on verbose output (looks like '{fn} {n}bytes @ {mode} -> {exit}') 30 | -b BS Sets blocksize for copy/pipe as well as for size calculation. 31 | -i FILE Sets the input file for dd (\`copy' mode only). 32 | -m MODE Sets the operation mode. Possible values are: 33 | copy dd-from-infile. Creates a regular file, slow. Default. 34 | seek dd-seek. Creates a sparse file (bad for swap), fast. See -n. 35 | alloc fallocate. Creates a regular file, fast. 36 | redir printf-pat-redir. Creates a regular file, slow. 37 | 38 | -p PAT The byte pattern to use in pipe mode. Do not use NUL bytes. 39 | 40 | " 41 | } 42 | 43 | humanreadable () 44 | { 45 | multiplier=1 46 | case $1 in 47 | *b) multiplier=$hbs ;; # blocks (tricky, right?) 48 | *k) multiplier=$((1<<10)) ;; 49 | *m) multiplier=$((1<<20)) ;; 50 | *g) multiplier=$((1<<30)) ;; 51 | # warning: extended-precision POSIX shells only (e.g. bash on amd64) 52 | *t) multiplier=$((1<<40)) ;; 53 | *p) multiplier=$((1<<50)) ;; 54 | *e) multiplier=$((1<<60)) ;; 55 | # for z and y, consider 56 | esac 57 | numeric=${1%[bkmgtpe]} 58 | printf "$((multiplier * numeric))" 59 | } 60 | 61 | bs=1048576 # bigger, better, faster! 62 | hbs=512 # the bs used for humanreadable, in order to respect 'mkfile'. 63 | quiet=0 64 | method=copy # copy/seek/alloc 65 | noremain=0 66 | verbose=0 67 | keepgoing=0 68 | INFILE=/dev/zero 69 | 70 | while getopts 'i:b:m:qknvr?' opt 71 | do 72 | case $opt in 73 | b) bs=$OPTARG hbs=$bs ;; 74 | i) INFILE=$OPTARG;; 75 | q) verbose=0 quiet=1 ;; 76 | k) keepgoing=1 ;; 77 | n) method=seek ;; # dd seek-only 78 | m) method=$OPTARG ;; 79 | r) noremain=1 ;; # ignore (size % bs) difference 80 | v) verbose=1 ;; # %s %llu bytes stdout; METHOD stderr; \n stdout. 81 | \?) 
usage_more; exit ;; 82 | *) usage; exit 2 ;; 83 | esac 84 | done 85 | 86 | shift $((OPTIND-1)) 87 | 88 | 89 | if [ -z "$1" ]; then 90 | die "No size specified" 91 | fi 92 | if [ -z "$2" ]; then 93 | die "No filename specified" 94 | fi 95 | 96 | SIZE=$(humanreadable "$1") || die "Invalid size: $1" 97 | FILENAME="$2" 98 | 99 | BS=$(humanreadable "$bs") 100 | 101 | COUNT=$(expr $SIZE / $BS) 102 | CHECK=$(expr $COUNT \* $BS) 103 | if [ "$CHECK" -ne "$SIZE" ]; then 104 | echo "Warning: Due to the blocksize requested, the file created will be $(expr $COUNT \* $BS) bytes and not $SIZE bytes" 105 | fi 106 | 107 | echo -en "Creating $SIZE byte file $FILENAME...." 108 | 109 | dd if=$INFILE bs=$BS count=$COUNT of="$FILENAME" 2>/dev/null 110 | ddresult=$? 111 | if [ "$quiet" -ne "1" ]; then 112 | # We all know that you're going to do this next - let's do it for you: 113 | if [ "$ddresult" -eq "0" ]; then 114 | echo "Finished:" 115 | else 116 | echo "An error occurred. dd returned code $ddresult." 117 | fi 118 | ls -l "$FILENAME" && ls -lh "$FILENAME" 119 | fi 120 | 121 | exit $ddresult 122 | -------------------------------------------------------------------------------- /mkpkgbreak/README.md: -------------------------------------------------------------------------------- 1 | # mkpkgbreak 2 | Makes a package break list. 3 | 4 | ## Usage 5 | 6 | 1. Install requests: 7 | 8 | ``` 9 | pip3 install --user requests 10 | ``` 11 | 12 | 2. Run: 13 | 14 | ``` 15 | $ mkpkgbreak samba 16 | PKGBREAK="acccheck<=0.2.1-1 caja-extensions<=1.24.0-1 cifs-utils<=6.10-1 \ 17 | edlaunch-rs<=0.4.7 ffmpeg<=4.2.4-6 gnome-control-center<=3.38.2-1 \ 18 | gnome-vfs<=2.24.4-8 gvfs<=1.46.1-1 kdenetwork-filesharing<=21.04.0 \ 19 | kio-extras<=21.04.0 kodi<=1:19.0-1 mpd<=0.21.26-1 mplayer<=1:1.4-6 \ 20 | mpv<=0.33.1 nemo-extensions<=4.8.0+git20210203 pysmbc<=1.0.22-1 \ 21 | sssd<=2.4.0-1 tdebase<=14.0.7-6 thunar-shares-plugin<=0.3.1-1 \ 22 | vlc<=3.0.12-1 wine<=3:6.7 xine-lib<=1.2.10-3" 23 | # write to samba/autobuild/defines: 24 | # mkpkgbreak samba >> /path/to/abbs-tree/extra-network/samba/autobuild/defines 25 | ``` 26 | -------------------------------------------------------------------------------- /mkpkgbreak/mkpkgbreak: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import requests 4 | import sys 5 | 6 | 7 | def get_pkgbreak_list(package_name: str) -> list: 8 | response = requests.get( 9 | "https://packages.aosc.io/revdep/{}?type=json".format(package_name)) 10 | response.raise_for_status() 11 | package_info = response.json() 12 | # De-duplicate while preserving order. 13 | pkgbreak_list = list(dict.fromkeys( 14 | package for group in package_info["sobreaks"] for package in group)) 15 | if package_info["sobreaks_circular"] is not None: 16 | pkgbreak_list += package_info["sobreaks_circular"] 17 | if len(pkgbreak_list) == 0: 18 | print("Package {} has nothing to break!".format(package_name)) 19 | exit(0) 20 | return pkgbreak_list 21 | 22 | 23 | def get_package_version(package_name: str) -> str: 24 | response = requests.get( 25 | "https://packages.aosc.io/packages/{}?type=json".format(package_name)) 26 | response.raise_for_status() 27 | package_info = response.json() 28 | package_section = "{}-{}".format( 29 | package_info["pkg"]["category"], package_info["pkg"]["section"]) 30 | package_directory_name = package_info["pkg"]["directory"] 31 | response = requests.get( 32 | "https://raw.githubusercontent.com/AOSC-Dev/aosc-os-abbs/stable/{}/{}/spec".format( 33 | package_section, 
package_directory_name)) 34 | response.raise_for_status() 35 | package_spec = response.text.split("\n") 36 | for i in package_spec: 37 | if i.startswith("VER="): 38 | package_version = i.replace("VER=", "") 39 | return package_version 40 | 41 | 42 | def gen_pkgbreak_string(break_list: list) -> str: 43 | max_line_size = 68 44 | pkgbreak_list = ["{}<={}".format( 45 | package, get_package_version(package)) for package in break_list] 46 | buffer = [] 47 | buffer2 = [] 48 | for i in pkgbreak_list: 49 | buffer.append(i) 50 | if len(" ".join(buffer)) < max_line_size: 51 | buffer2.append(i) 52 | else: 53 | buffer = [i] 54 | buffer2.append("\\\n") 55 | buffer2.append(" " * 9 + " ".join(buffer)) 56 | return "PKGBREAK=\"{}\"".format(" ".join(buffer2)) 57 | 58 | 59 | def main(): 60 | if len(sys.argv) != 2: 61 | print("Usage: mkpkgbreak PACKAGE_NAME") 62 | exit(1) 63 | package_name = sys.argv[1] 64 | break_list = get_pkgbreak_list(package_name) 65 | print(gen_pkgbreak_string(break_list)) 66 | 67 | 68 | if __name__ == "__main__": 69 | main() 70 | -------------------------------------------------------------------------------- /mkreleasetorrent.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # mkreleasetorrent.sh: Creates a torrent file for AOSC OS releases, along 3 | # with the web seeds. 4 | # uses mktorrent. https://github.com/esmil/mktorrent 5 | MIRROR_LIST='http://' # github.com/AOSC-Dev/Homepage/.status/sites/, make it a tarball. 6 | PUB_TRACKERS='udp://tracker.openbittorrent.com:80/announce \ 7 | udp://tracker.publicbt.com:80/announce \ 8 | udp://trk.obtracker.net:2710/announce' 9 | MIRROR=~/mirror 10 | 11 | # first-time generate 12 | if [ ! -e ~/.torrentRel ]; then 13 | curl "$MIRROR_LIST" | tar -xzf - && 14 | find sites | while read site; do 15 | . "$site" 16 | echo "'$SITE' " >> ~/.torrentRel 17 | done 18 | fi 19 | trackers(){ for i in $PUB_TRACKERS; do echo "-a $i "; done; } 20 | relpath(){ python3 -c "import os.path; print(os.path.relpath('$1','${2:-$PWD}'))" ; } 21 | webseeds(){ while read line; do echo "-w $line"; done < ~/.torrentRel; } 22 | 23 | target="$(relpath $1)" 24 | cd $MIRROR # make sure we have control over the name. Alternative solution: put the path into 25 | # webseed `root'. 26 | # cp -asr "$1" /tmp/torrel/"$target" # isolate 27 | # cd "$1/torrel" 28 | mktorrent "$target" -n "$target" -c "${comment=AOSC Release $name}" \ 29 | -o"$MIRROR/torrent-releases/$(basename $target).torrent" $(trackers) $(webseeds) -l 22 30 | 31 | # For Multi-File torrents, this gets a bit more interesting. Normally, BitTorrent clients use the "name" 32 | # from the .torrent info section to make a folder, then use the "path/file" items from the info section 33 | # within that folder. For the case of Multi-File torrents, the 'url-list' should be a root folder 34 | # where a client could add the same "name" and "path/file" to create the URL for the request. 35 | # http://getright.com/seedtorrent.html 36 | -------------------------------------------------------------------------------- /mkrfr/mkrfr: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0 3 | set -Eeuo pipefail 4 | trap 'errorHandler "$?" 
"${FUNCNAME[0]}" "$LINENO"' ERR 5 | 6 | error() { 7 | echo -e "\e[0;31m[ERROR] $*\e[0m" >&2 8 | } 9 | 10 | warn() { 11 | echo -e "\e[0;33m[WARN ] $*\e[0m" >&2 12 | } 13 | 14 | die() { 15 | error "$*" 16 | exit 1 17 | } 18 | 19 | log() { 20 | echo -e "\e[0;32m$*\e[0m" >&2 21 | } 22 | 23 | errorHandler() { 24 | echo -e "\e[0;31m[BUG] Line $3 ($2): $1\e[0m" >&2 25 | exit "$1" 26 | } 27 | 28 | result="" 29 | 30 | while read -r pr; do 31 | number="$(jq -r '.number' <<<"$pr")" 32 | title="$(jq -r '.title' <<<"$pr")" 33 | url="$(jq -r '.url' <<<"$pr")" 34 | 35 | buildCount="$(jq -r '.statusCheckRollup | length' <<<"$pr")" 36 | if ((buildCount == 0)); then 37 | warn "PR $number ($title) has not been built by BuildIt!" 38 | fi 39 | buildFailCount="$(jq -r '.statusCheckRollup | map(select(.conclusion != "SUCCESS")) | length' <<<"$pr")" 40 | if ((buildFailCount != 0)); then 41 | warn "PR $number ($title) has some failed build jobs" 42 | fi 43 | reviewCount="$(jq -r '.latestReviews | length' <<<"$pr")" 44 | if ((reviewCount != 0)); then 45 | log "PR $number ($title) already has latest reviews" 46 | continue 47 | fi 48 | 49 | title="${title/: update to / }" 50 | log "Adding: PR $number ($title)" 51 | result+="$number ($title) $url 52 | " 53 | done < <( 54 | gh pr list --repo 'AOSC-Dev/aosc-os-abbs' --author '@me' --search 'is:open draft:false comments:>=1 -label:flight -label:preview -label:pre-release -label:blocked' --json 'number,url,title,statusCheckRollup,latestReviews' | jq -ac '.[]' 55 | ) 56 | 57 | printf 'RFR:\n' 58 | echo -n "$result" | sort 59 | -------------------------------------------------------------------------------- /mksvgspcimen.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | usage(){ 3 | echo "USAGE: $0 font-family color-up color-dn [fontname] [favouriteword]"; exit 1; 4 | } 5 | # This script is relased under CC0. 6 | ## SAMPLE: 7 | # mksvgspcimen.sh Iosevka \#8888ff \#4e4eff "  [ˈmæs.tɚ]" > Iosevka_todo_topath.svg 8 | : ${font_family=$1} ${color_up=$2} ${color_dn=$3} ${fontname=${4:-${font_family}}} ${favouriteword=${5:-Lorem Ipsum}} 9 | 10 | echo "mksvgspeciman 0.1 11 | 12 | CMDLINE $(printf '%q ' "$0" "$@") 13 | FAMILY ${font_family} 14 | NAME ${fontname} 15 | UP ${color_up} 16 | DN ${color_dn} 17 | WORD ${favouriteword} 18 | ">&2 19 | 20 | [ "$3" ] || usage 21 | 22 | echo " 23 | 24 | 25 | 26 | 27 | ${fontname} 28 | Aa Ee Rr 29 | Bb Gg Jj 30 | ${favouriteword} 31 | abcdefghijklmnopqrstuvwxyz 32 | A 33 | 0123456789 34 | 35 | 36 | " 37 | _ret=$? 38 | if [ $_ret == 0 ]; then 39 | echo "Done. Convert the text to paths and do other necessary adjustments in your svg editor.">&2 40 | else 41 | echo "cat: $_ret" 42 | fi 43 | -------------------------------------------------------------------------------- /mktransitionpac/mktransitionpac: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0 3 | set -Eeuo pipefail 4 | trap 'errorHandler "$?" "${FUNCNAME[0]}" "$LINENO"' ERR 5 | 6 | error() { 7 | echo -e "\e[0;31m[ERROR] $*\e[0m" >&2 8 | } 9 | 10 | die() { 11 | error "$*" 12 | exit 1 13 | } 14 | 15 | log() { 16 | echo -e "\e[0;32m$*\e[0m" >&2 17 | } 18 | 19 | errorHandler() { 20 | echo -e "\e[0;31m[BUG] Line $3 ($2): $1\e[0m" >&2 21 | exit "$1" 22 | } 23 | 24 | getPkgDir() { 25 | local path 26 | path="$(find . 
-mindepth 2 -maxdepth 2 -type d -name "$1" -print -quit)" 27 | echo "${path#./}" 28 | } 29 | 30 | getPkgEpoch() { 31 | ( 32 | # shellcheck source=/dev/null 33 | source "$1"/autobuild/defines 34 | echo "${PKGEPOCH:-0}" 35 | ) 36 | } 37 | 38 | getPkgSec() { 39 | ( 40 | # shellcheck source=/dev/null 41 | source "$1"/autobuild/defines 42 | echo "${PKGSEC:?}" 43 | ) 44 | } 45 | 46 | mkTransitionalPackage() { 47 | local transPac="$1" newPac="$2" 48 | local pkgSection 49 | pkgSection="$(dirname "$(getPkgDir "$2")")" 50 | 51 | local pkgEpoch=1 52 | local pkgSec 53 | pkgSec="$(getPkgSec "$pkgSection/$newPac")" 54 | 55 | if [[ -e "$pkgSection/$transPac" ]]; then 56 | log "[$1 -> $2] Removing old package ..." 57 | pkgEpoch="$(getPkgEpoch "$pkgSection/$transPac")" 58 | ((pkgEpoch++)) || true 59 | rm -rf "${pkgSection:?}/$transPac" 60 | fi 61 | 62 | log "[$1 -> $2] Creating package (epoch $pkgEpoch) ..." 63 | mkdir "$pkgSection/$transPac" 64 | mkdir "$pkgSection/$transPac/autobuild" 65 | cat >"$pkgSection/$transPac/spec" <"$pkgSection/$transPac/autobuild/build" <"$pkgSection/$transPac/autobuild/defines" < $2] Committing ..." 83 | git add "$pkgSection/$transPac" 84 | git commit -m "$transPac: transitionalise for $newPac" \ 85 | --trailer X-AOSC-mktransitionpac-transition-package="$transPac" \ 86 | --trailer X-AOSC-mktransitionpac-new-package="$newPac" \ 87 | --trailer X-AOSC-mktransitionpac-directory="$pkgSection" \ 88 | --trailer X-AOSC-mktransitionpac-section="$pkgSec" \ 89 | --trailer X-AOSC-mktransitionpac-epoch="$pkgEpoch" \ 90 | -- "$pkgSection/$transPac" 91 | 92 | local commitLog 93 | commitLog="$(git -c core.abbrev=16 \ 94 | log HEAD \ 95 | --oneline -1 --no-decorate --color=always)" 96 | log "[$1 -> $2] $commitLog" 97 | 98 | log "[$1 -> $2] SUCCESS" 99 | } 100 | 101 | if [[ $# != 2 ]]; then 102 | cat < 104 | EOF 105 | exit 106 | fi 107 | 108 | if ! mkTransitionalPackage "$1" "$2"; then 109 | error "[$1 -> $2] FAILED" 110 | fi 111 | -------------------------------------------------------------------------------- /msgtac.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | die(){ echo "$1"; exit "${2-1}"; } 3 | ## CC0 4 | usage(){ 5 | echo "Usage: $0 POFILE [OUTPREFIX] 6 | Creates a pair of po files with the same messages in reverse order. \`-' as 7 | stdin is accepted as a special case. 8 | 9 | Please be warned that this thing expects blank lines between messages. 10 | Otherwise, let msgcat format it before you feed it in. 11 | 12 | Current assumptions expect this script only processes filenames not starting 13 | with \`!' and with messages < 100000." 14 | exit 1 15 | } 16 | 17 | # So bash gives us why there will be a problem automatically! 
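# Editor's note: the exec below binds file descriptor 4 to either the named
# PO file or to stdin (when `-' is given), so the read loops further down can
# consume fd 4 without caring how the input was supplied.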
18 | if [ "$1" ] && [ "$1" != '-' ]; then 19 | exec 4<"$1" 20 | else 21 | exec 4<&0 && [ "$2" ] 22 | fi || usage 23 | 24 | outpre="${2:-$1}"; outpre="${outpre%.po*}" 25 | 26 | IFS='' i=0 BUF='' 27 | if ( _variable+=syntax_test ) >/dev/null 2>&1 && 28 | { [ -z "$MSGTAC_NO_PLUSEQ" ] || [ "$MSGTAC_NO_PLUSEQ" == "0" ]; }; then 29 | while read -r line; do case "$line" in 30 | ('') BUF+=" 31 | #: !DUMMY:$((100000-i))" i=$((i+1));; 32 | (*) BUF+=" 33 | $line";; 34 | esac; done <&4 35 | else 36 | while read -r line; do case "$line" in 37 | ('') BUF="$BUF 38 | #: !DUMMY:$((100000-i))" i=$((i+1));; 39 | (*) BUF="$BUF 40 | $line";; 41 | esac; done <&4 42 | fi 43 | 44 | printf '%s\n' "$BUF" | msgcat -F -o "$outpre.rev.po" - 45 | -------------------------------------------------------------------------------- /patch-series-rename/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /patch-series-rename/README.md: -------------------------------------------------------------------------------- 1 | # bump-rel 2 | 3 | Renames patch files to aosc-os-abbs-compliant file names, based on a series 4 | file supplied by upstream distributions. 5 | 6 | ## Usage 7 | 8 | ``` 9 | patch-series-rename DISTRO SERIES_FILE 10 | 11 | - DISTRO: Distribution origin for the patch series. 12 | - SERIES_FILE: File name for the series file 13 | (e.g., seamonkey.spec, series, ...) 14 | ``` 15 | -------------------------------------------------------------------------------- /patch-series-rename/patch-series-rename: -------------------------------------------------------------------------------- 1 | _help_message() { 2 | printf "\ 3 | Usage: 4 | patch-series-rename DISTRO SERIES_FILE 5 | 6 | - DISTRO: Distribution origin for the patch series. 7 | - SERIES_FILE: File name for the series file 8 | (e.g., seamonkey.spec, series, ...). 9 | " 10 | } 11 | 12 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 13 | _help_message 14 | exit 0 15 | fi 16 | 17 | if [ -z "$1" ]; then 18 | printf "[!!!] Please specify a Linux distribution.\n\n" 19 | _help_message 20 | exit 1 21 | elif [ -z "$2" ]; then 22 | printf "[!!!] 
Please specify a patch series file.\n\n" 23 | _help_message 24 | exit 1 25 | fi 26 | 27 | export COUNT=0 28 | 29 | if [[ "$1" = "Fedora" ]]; then 30 | for i in $(grep '^Patch' "$2" | awk '{ print $2 }'); do 31 | export COUNT=$(( COUNT + 1 )) 32 | if [[ ${COUNT} -lt 10 ]]; then 33 | mv -v $i 000${COUNT}-${1}-$i 34 | elif [[ ${COUNT} -gt 9 && ${COUNT} -lt 100 ]]; then 35 | mv -v $i 00${COUNT}-${1}-$i 36 | elif [[ ${COUNT} -gt 99 && ${COUNT} -lt 1000 ]]; then 37 | mv -v $i 0${COUNT}-${1}-$i 38 | elif [[ ${COUNT} -gt 999 && ${COUNT} -lt 10000 ]]; then 39 | mv -v $i ${COUNT}-${1}-$i 40 | fi 41 | done 42 | else 43 | for i in $(cat "$2"); do 44 | export COUNT=$(( COUNT + 1 )) 45 | if [[ ${COUNT} -lt 10 ]]; then 46 | mv -v $i 000${COUNT}-${1}-$i 47 | elif [[ ${COUNT} -gt 9 && ${COUNT} -lt 100 ]]; then 48 | mv -v $i 00${COUNT}-${1}-$i 49 | elif [[ ${COUNT} -gt 99 && ${COUNT} -lt 1000 ]]; then 50 | mv -v $i 0${COUNT}-${1}-$i 51 | elif [[ ${COUNT} -gt 999 && ${COUNT} -lt 10000 ]]; then 52 | mv -v $i ${COUNT}-${1}-$i 53 | fi 54 | done 55 | fi 56 | -------------------------------------------------------------------------------- /pingus_font.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | """ 3 | A weird hack used to replace the tedious GIMP-based font creation 4 | procedure in pingus. 5 | """ 6 | import png 7 | 8 | palette = [(0xff, 0xff, 0xff, i) for i in range(255, -1, -1)] 9 | 10 | def read_pgm2(pgmf) -> tuple: 11 | """ 12 | A dirty P2 pgm reader for fontgen only. 13 | 14 | Returns a (width, height, pixels) tuple, where pixels is a flat list. 15 | """ 16 | assert pgmf.readline() == 'P2\n' # magic number (ASCII greyscale) 17 | assert pgmf.readline()[0] == '#' # comment signature 18 | (w, h) = map(int, pgmf.readline().split()) 19 | depth = int(pgmf.readline()) 20 | assert depth <= 255 21 | 22 | return (w, h, [int(v) for row in pgmf for v in row.split()]) 23 | 24 | def convert(filename): 25 | """ 26 | Takes a pgm and makes it a pingus png font sprite. 27 | """ 28 | global palette 29 | with open(filename) as pgmf: 30 | w, h, pixels = read_pgm2(pgmf) 31 | writer = png.Writer(size=(w,h), palette=palette, compression=9, bitdepth=8) 32 | 33 | # We don't need any gamma here: freetype's greyscale output is a coverage map. 
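    # Each grey value g (0-255) indexes `palette', which maps it to white
    # with alpha 255-g, turning glyph coverage into transparency in the PNG.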
34 | with open(filename[:-4] + '.png', 'wb') as pngf: 35 | writer.write_array(pngf, pixels) 36 | 37 | import sys 38 | convert(sys.argv[1]) 39 | -------------------------------------------------------------------------------- /pjs-fetch-oracle-jdk.js: -------------------------------------------------------------------------------- 1 | var JDK_DL_PAGE = 'http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html'; 2 | var page = require('webpage').create(); 3 | var url = JDK_DL_PAGE; 4 | page.open(url, function(status) { 5 | if (status != 'success') { 6 | phantom.exit(1); 7 | } 8 | var links = page.evaluate(function() { 9 | var JDK_VER = '8u162'; 10 | var results = ''; 11 | document.getElementById('agreementjdk-' + JDK_VER + '-oth-JPR-a').click(); 12 | var suffixes = ['-linux-arm32-vfp-hflt.tar.gz', '-linux-arm64-vfp-hflt.tar.gz', '-linux-x64.tar.gz']; 13 | for (var i = 0; i < suffixes.length; i++) { 14 | console.log('jdk-' + JDK_VER + '-oth-JPRXXXjdk-' + JDK_VER + suffixes[i]); 15 | var tmp = document.getElementById('jdk-' + JDK_VER + '-oth-JPRXXXjdk-' + JDK_VER + suffixes[i]).href; 16 | if (!tmp) { 17 | results += 'err '; 18 | continue; 19 | } 20 | results += (tmp + ' '); 21 | } 22 | return results; 23 | }); 24 | console.log('js-out: ' + links); 25 | phantom.exit(); 26 | }); 27 | -------------------------------------------------------------------------------- /pkg-leaves.rkt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env racket 2 | 3 | ;; Copyright 2025 Kaiyang Wu 4 | ;; 5 | ;; Permission is hereby granted, free of charge, to any person obtaining a copy 6 | ;; of this software and associated documentation files (the “Software”), to 7 | ;; deal in the Software without restriction, including without limitation the 8 | ;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 9 | ;; sell copies of the Software, and to permit persons to whom the Software is 10 | ;; furnished to do so, subject to the following conditions: 11 | ;; 12 | ;; The above copyright notice and this permission notice shall be included in 13 | ;; all copies or substantial portions of the Software. 14 | ;; 15 | ;; THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | ;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | ;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | ;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | ;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 | ;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 | ;; IN THE SOFTWARE. 22 | 23 | ;; Prerequisites on AOSC OS 24 | ;; 25 | ;; `oma install racket` 26 | ;; 27 | ;; Usage 28 | ;; 29 | ;; Run the script, and you will get all leaf nodes of the current AOSC OS 30 | ;; dependency tree (packages with no reverse dependencies). 31 | 32 | #lang racket/base 33 | 34 | (require racket/cmdline 35 | racket/contract 36 | racket/list 37 | racket/match 38 | racket/string) 39 | (require json 40 | net/url 41 | net/url-connect 42 | openssl) 43 | 44 | (current-https-protocol (ssl-secure-client-context)) 45 | 46 | (define/contract (extract-http-code header) 47 | (-> string? exact-positive-integer?) 48 | (match header 49 | [(regexp #rx"^HTTP/... 
([1-9][0-9][0-9]).*" (list _ status-code)) 50 | (string->number status-code)] 51 | [_ (error 'extract-http-code "invalid http header: ~a" header)])) 52 | 53 | (define/contract (revdeps pkgname) 54 | (-> string? (listof string?)) 55 | (define url 56 | (string->url (format "https://packages.aosc.io/revdep/~a?type=json" 57 | pkgname))) 58 | (define port (get-impure-port url)) 59 | (define header (purify-port port)) 60 | (define json-res 61 | (if (= (extract-http-code header) 200) 62 | (read-json port) 63 | (error 'revdeps 64 | "failed to get reverse dependencies for ~a: status code ~a" 65 | pkgname 66 | (extract-http-code header)))) 67 | (flatten (for/list ([group (hash-ref json-res 'revdeps)]) 68 | (for/list ([p (hash-ref group 'deps)]) 69 | (hash-ref p 'package))))) 70 | 71 | (define/contract (all-packages) 72 | (-> (listof string?)) 73 | (define url (string->url "https://packages.aosc.io/list.json")) 74 | (define port (get-impure-port url)) 75 | (define header (purify-port port)) 76 | (define json-res 77 | (if (= (extract-http-code header) 200) 78 | (read-json port) 79 | (error 'all-packages 80 | "failed to get the list of all packages: status code ~a" 81 | (extract-http-code header)))) 82 | (remove-duplicates 83 | (filter-map (λ (package) 84 | (and (equal? (hash-ref package 'branch) "stable") 85 | (hash-ref package 'name))) 86 | (hash-ref json-res 'packages))) 87 | ) 88 | 89 | (define cli 90 | (command-line #:program "pkg-leaves.rkt" 91 | #:usage-help 92 | "get all leaf nodes on the dependency tree (packages with no 93 | reverse dependencies)" 94 | )) 95 | 96 | (for ([package (in-list (all-packages))]) 97 | (when (null? (revdeps package)) 98 | (displayln package))) 99 | -------------------------------------------------------------------------------- /pkg2ab: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # This is written just for those xfce tasks... Political incorrect. 3 | # This script tries to convert Archlinux PKGBUILD things into autobuild defines. It simply does the copy and paste job. 4 | 5 | # Date format. 6 | LANG=C 7 | 8 | ##General Functions, move to abbs libs. 9 | # fundump funname filename 10 | fundump(){ declare -f "$1" | head -n+1 | tail -n+2 | sed -re 's/^ //g' >> "$2"; } 11 | # funhere funname 12 | funhere(){ declare -F "$1" &>/dev/null; } 13 | # getfile file 14 | getfile(){ 15 | case "$1" in 16 | http://*|ftp://*) 17 | wget -nv "$1";; 18 | git://*) 19 | git clone --depth 1 "$1";; 20 | *://*) 21 | warn "So what's \`$1'?"; return 1;; 22 | *) 23 | echo "Local file $1." 24 | cp -l "$orig/$1" . 25 | esac 26 | set -- "$(basename $1)" 27 | # Actually we can have everything done with 7z. 28 | case "$1" in 29 | *.tar*|*.zip|*.rar|*.7z) 30 | 7z x "$1" # tar -xf "$1";; unzip "$1";; unrar "$1";; 31 | esac 32 | } 33 | ##Local Funcs 34 | die(){ echo "FATAL: $1" >&2; exit ${2-1}; } 35 | info(){ echo "INFO: $1"; } 36 | warn(){ echo "WARN: $1" >&2; } 37 | # HELP 38 | print_help() { 39 | echo -e "\e[1mUsage:\e[0m pkg2ab [PKGBUILD-PATH] \e[2m[foo.install] 40 | \e[1mVaribles that affect pkg2ab:\e[0m 41 | ArchPatch = [ 0 | 1 ] \t\t Set if pkg2ab should include PKGBUILD prepare() to autobuild patch. 42 | XtraPatch = \\\n... \t Extra things to add to autobuild patch." 43 | exit ${2-0} 44 | } 45 | # Defaults 46 | export ArchPatch=${ArchPatch=1} 47 | 48 | # Startup & Loading 49 | [ "$1" == --help ] && print_help 50 | 51 | [ -z "$1" ] && set -- $PWD/PKGBUILD 52 | . "$1" || die "Failed to load PKGBUILD $1." 
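# Sourcing the PKGBUILD above brings its variables (pkgname, pkgver, pkgdesc,
# depends, makedepends, source, ...) into this shell; the autobuild files
# generated below are filled in from them.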
53 | 54 | [ -z "$2" ] && set -- "$1" ${pkgname}.install 55 | . "$2" || echo "Never mind." 56 | 57 | temp=/tmp/mkab-$(date +%s) 58 | orig="$PWD" 59 | mkdir -p $temp 60 | cd $temp 61 | 62 | info "Fetching files as defined in PKGBUILD." 63 | for file in "${source[@]}"; do getfile "$file"; done 64 | 65 | info "Creating ab files." 66 | 67 | mkdir -p autobuild.gen/patches 68 | echo "# Generated by pkg2ab on $(date)" > autobuild.gen/patch 69 | if [ "$ArchPatch" == 1 ]; then 70 | cp *.{patch,diff} autobuild.gen/patches 71 | fundump prepare autobuild.gen/patch 72 | fi 73 | echo -e "$XtraPatch" >> autobuild.gen/patch 74 | 75 | # TODO: verspec member translation to ab syntax. 76 | cat > autobuild.gen/defines << _end_of_abdef 77 | # Generated by pkg2ab on $(date) 78 | PKGNAME=$pkgname 79 | PKGVER=$pkgver 80 | PKGDES="$pkgdesc" 81 | PKGDEP="${depends[@]}" 82 | BUILDDEP="${makedepends[@]}" 83 | _end_of_abdef 84 | 85 | funhere post_install && fundump post_install autobuild.gen/postinst 86 | # TODO post_remove, ... 87 | 88 | echo -e "\e[1mComplete! \nView results in ${temp}.\e[31m Always check the files manually before you run autobuild.\e[0m" 89 | -------------------------------------------------------------------------------- /pull-topic-as-local/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar <sam@hocevar.net> 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /pull-topic-as-local/README.md: -------------------------------------------------------------------------------- 1 | pull-topic-as-local 2 | =================== 3 | 4 | A simple script to pull topic repositories for use as Ciel local repositories. 5 | 6 | Usage 7 | ----- 8 | 9 | ```bash 10 | pull-topic-as-local [TOPIC_NAME_A] [TOPIC_NAME_B] ... [TOPIC_NAME_Z] 11 | ``` 12 | -------------------------------------------------------------------------------- /pull-topic-as-local/pull-topic-as-local: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | _usage(){ 4 | echo -e "\ 5 | 6 | A simple script to pull topic repositories for use as Ciel local repositories. 7 | 8 | Usage: 9 | 10 | pull-topic-as-local [TOPIC_NAME_A] [TOPIC_NAME_B] ... [TOPIC_NAME_Z] 11 | " 12 | 13 | } 14 | 15 | if [ -z "$1" ]; then 16 | _usage; exit 1 17 | fi 18 | 19 | for i in "$@"; do 20 | if [ -e "OUTPUT-$i" ]; then 21 | while true; do 22 | read -p "Detected an existing local repository for $i, overwrite? " yn 23 | case $yn in 24 | [Yy]* ) 25 | rm -r "OUTPUT-$i"; break ;; 26 | [Nn]* ) 27 | echo "Aborting, please move your local repository directory OUTPUT-$i aside."; exit 1 ;; 28 | * ) 29 | echo "Please input Y[y] or N[n]." ;; 30 | esac 31 | done 32 | fi 33 | 34 | echo "Pulling topic repository: $i ..." 
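	# Map `uname -m' machine names onto the dpkg architecture names used in
	# repository paths (e.g. x86_64 -> amd64, aarch64 -> arm64) before
	# composing the rsync source pattern.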
35 | arch=`uname -m` 36 | [ $arch == "aarch64" ] && arch="arm64" 37 | [ $arch == "x86_64" ] && arch="amd64" 38 | [ $arch == "mips64" ] && arch="loongson3" 39 | [ $arch == "ppc64le" ] && arch="ppc64el" 40 | rsync \ 41 | -avSHPR \ 42 | --include='*/' \ 43 | rsync://mirror.anthonos.org/anthon/debs/pool/$i/main/**/*_{"${arch}",noarch}.deb \ 44 | OUTPUT-$i 45 | 46 | echo "Adjusting topic repository for Ciel: $i ..." 47 | mv -v OUTPUT-$i/*/*/*/main \ 48 | OUTPUT-$i/ 49 | rm -rv OUTPUT-$i/debs 50 | mv -v OUTPUT-$i/main \ 51 | OUTPUT-$i/debs 52 | done 53 | -------------------------------------------------------------------------------- /pushpkg/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | 16 | -------------------------------------------------------------------------------- /pushpkg/README.md: -------------------------------------------------------------------------------- 1 | pushpkg 2 | ------- 3 | 4 | A simple wrapper script for the standard AOSC OS package upload procedure. 5 | 6 | You should run this script inside a directory which contains a `debs` directory. 7 | 8 | ``` 9 | usage: pushpkg [-h] [-v] [-d] [-f] [-r] [-6] [-4] [--host [HOST]] [-i IDENTITY_FILE] [USERNAME] [BRANCH] [COMPONENT] 10 | 11 | pushpkg, push aosc package to repo.aosc.io or mirrors 12 | 13 | positional arguments: 14 | USERNAME Your LDAP username. 15 | BRANCH AOSC OS update branch (stable, stable-proposed, testing, etc.) 16 | COMPONENT (Optional) Repository component (main, bsp-sunxi, etc.) Falls back to "main" if not specified. 17 | 18 | options: 19 | -h, --help show this help message and exit 20 | -v, --verbose Enable verbose logging for ssh and rsync 21 | -d, --delete Clean OUTPUT directory after finishing uploading. 22 | -f, --force-push-noarch-package 23 | Force Push noarch package. 
24 | -r, --retro Push to AOSC OS/Retro repo 25 | -6, --ipv6 Use IPv6 addresses only 26 | -4, --ipv4 Use IPv4 addresses only 27 | --host [HOST] Specify the rsync host to push packages, defaults to repo.aosc.io 28 | -i IDENTITY_FILE, --identity-file IDENTITY_FILE 29 | SSH identity file 30 | ``` 31 | -------------------------------------------------------------------------------- /pushpkg/completions/pushpkg.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | _pushpkg_list_username() { 4 | [ -n "$PUSHPKG_USERNAME" ] && echo "$PUSHPKG_USERNAME" 5 | whoami 6 | } 7 | 8 | _pushpkg_list_branch() { 9 | local cwd 10 | cwd="$(basename "$PWD")" 11 | echo "${cwd/OUTPUT-}" 12 | } 13 | 14 | _pushpkg() { 15 | local cur 16 | COMPREPLY=() 17 | cur="${COMP_WORDS[COMP_CWORD]}" 18 | 19 | components="main bsp-sunxi bsp-rk bsp-rpi bsp-qcom" 20 | options="-v --verbose -d --delete -f --force-push-noarch-package -r --retro -h --help -6 --ipv6 -4 --ipv4 --host -i --identity-file" 21 | if [[ ${cur} == -* || ${COMP_CWORD} -ge 4 ]] ; then 22 | COMPREPLY=( $(compgen -W "${options}" -- "${cur}") ) 23 | return 0 24 | fi 25 | case "${COMP_CWORD}" in 26 | 1) 27 | COMPREPLY=( $(compgen -W "$(_pushpkg_list_username)" -- "${cur}") ) 28 | ;; 29 | 2) 30 | COMPREPLY=( $(compgen -W "$(_pushpkg_list_branch)" -- "${cur}") ) 31 | ;; 32 | 3) 33 | COMPREPLY=( $(compgen -W "${components}" -- "${cur}") ) 34 | ;; 35 | esac 36 | } 37 | 38 | complete -F _pushpkg -o default pushpkg 39 | -------------------------------------------------------------------------------- /pushpkg/completions/pushpkg.fish: -------------------------------------------------------------------------------- 1 | function __pushpkg_complete_username 2 | echo "$PUSHPKG_USERNAME" && whoami 3 | end 4 | 5 | function __pushpkg_complete_branch 6 | string replace "OUTPUT-" "" (basename "$PWD") 7 | end 8 | 9 | complete -c pushpkg -s h -l help -d 'Print help information' 10 | complete -c pushpkg -s f -l force-push-noarch-package -d 'Force Push noarch package' 11 | complete -c pushpkg -s d -l delete -d 'Clean OUTPUT directory after finishing uploading' 12 | complete -c pushpkg -s r -l retro -d 'Push to AOSC OS/Retro repo' 13 | complete -c pushpkg -s v -l verbose -d 'Enable verbose logging for ssh and rsync' 14 | complete -c pushpkg -s 6 -l ipv6 -d 'Use IPv6 addresses only' 15 | complete -c pushpkg -s 4 -l ipv4 -d 'Use IPv4 addresses only' 16 | complete -c pushpkg -l host -d 'Specify the rsync host to push packages, defaults to repo.aosc.io' 17 | complete -c pushpkg -s i -l identity-file -d 'SSH identity file' 18 | complete -xc pushpkg -n "__fish_is_nth_token 1" -a "(__pushpkg_complete_username)" -d 'LDAP username' 19 | complete -xc pushpkg -n "__fish_is_nth_token 2" -a "(__pushpkg_complete_branch)" -d 'AOSC OS update branch' 20 | complete -xc pushpkg -n "__fish_is_nth_token 3" -a "main bsp-sunxi bsp-rk bsp-rpi bsp-qcom" -d 'Repository component' 21 | -------------------------------------------------------------------------------- /qt5-repack.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # KDE Qt source repository. 4 | KDE_QT_REPO='https://invent.kde.org/qt/qt/qt5' 5 | KDE_QT_BANNED_MODULES=('qtcanvas3d' 'qtfeedback' 'qtpim' 'qtqa' 'qtrepotools' 'qtsystems' 'qtdocgallery') 6 | # Catapult source repository. 7 | CATAPULT_REPO='https://chromium.googlesource.com/catapult' 8 | # QtWebEngine source repository. 
9 | QTWEBENGINE_REPO='https://github.com/qt/qtwebengine' 10 | # QtWebKit source archive 11 | QTWEBKIT_URL="https://github.com/qtwebkit/qtwebkit/releases/download/qtwebkit-${QTWEBKIT_VERSION}-alpha4/qtwebkit-${QTWEBKIT_VERSION}-alpha4.tar.xz" 12 | 13 | GIT_ARCHIVE_BIN="git-archive-all" 14 | 15 | clone_kde_qt() { 16 | echo 'Cloning Qt 5 KDE fork sources ...' 17 | git clone "${KDE_QT_REPO}" 'kde-qt5' 18 | cd kde-qt5 19 | git checkout -f "${KDE_QT_COMMIT}" 20 | local COMMIT_TIMESTAMP="$(git log -1 --format=%ct)" 21 | date --date "@${COMMIT_TIMESTAMP}" '+%Y%m%d' > ../COMMIT-DATE 22 | echo '[+] Unregistering unwanted Qt components ...' 23 | git rm -rf "${KDE_QT_BANNED_MODULES[@]}" 24 | echo '[+] Replacing QtWebEngine submodule ...' 25 | git submodule set-url qtwebengine "${QTWEBENGINE_REPO}" 26 | echo '[+] Cloning Qt components ...' 27 | git submodule update --recursive --init --jobs 4 28 | echo '[+] Checking out QtWebEngine ...' 29 | git -C qtwebengine checkout -f tags/v"${QTWEBENGINE_VERSION}"-lts 30 | echo '[+] Committing changes ...' 31 | git add .gitmodules qtwebengine 32 | git config --local user.name 'Bot' 33 | git config --local user.email 'bot@aosc.io' 34 | git commit -m "[AUTO] Sync QtWebEngine to ${QTWEBENGINE_VERSION}" 35 | git submodule update --recursive --init 36 | echo '[+] Archiving Git repository using git-archive-all ...' 37 | "${GIT_ARCHIVE_BIN}" --force-submodules ../qt-5.tmp.tar 38 | cd .. 39 | } 40 | 41 | fetch_webkit() { 42 | echo '[+] Fetching Qt Webkit ...' 43 | wget "${QTWEBKIT_URL}" 44 | tar xf "qtwebkit-${QTWEBKIT_VERSION}-alpha4.tar.xz" 45 | } 46 | 47 | [[ x"${QT_VERSION}" = "x" ]] && echo "QT_VERSION not set." && exit 1 48 | [[ x"${QTWEBENGINE_VERSION}" = "x" ]] && echo "QTWEBENGINE_VERSION not set." && exit 1 49 | [[ x"${QTWEBKIT_VERSION}" = "x" ]] && echo "QTWEBKIT_VERSION not set. Go to https://github.com/qtwebkit/qtwebkit/tags to figure it out." && exit 1 50 | [ -z "${KDE_QT_COMMIT}" ] && echo "KDE_QT_COMMIT not set. Go to https://invent.kde.org/qt/qt/qt5/-/tree/kde/5.15 to figure it out." && exit 1 51 | 52 | echo '[+] Performing pre-repack clean-up ...' 53 | rm -rf \ 54 | 'catapult' \ 55 | 'kde-qt5' \ 56 | 'qtwebengine' \ 57 | "qtwebkit-${QTWEBKIT_VERSION}-alpha4.tar.xz" \ 58 | "qt-5-${QT_VERSION}+webengine${QTWEBENGINE_VERSION}+webkit${QTWEBKIT_VERSION}+kde"*.tar* \ 59 | "qt-5.tmp.tar" \ 60 | "qtwebengine.tmp.tar" 61 | 62 | echo '[+] Installing git-archive-all utility ...' 63 | pip3 install --user --upgrade git-archive-all 64 | "${GIT_ARCHIVE_BIN}" --version 65 | 66 | clone_kde_qt & 67 | KDE_QT_JOB="$!" 68 | 69 | fetch_webkit & 70 | WEBKIT_JOB="$!" 71 | 72 | wait $KDE_QT_JOB $WEBKIT_JOB 73 | 74 | echo '[+] Cleaning up downloaded files ...' 75 | rm -rf 'catapult' 'kde-qt5' "qtwebkit-${QTWEBKIT_VERSION}-alpha4.tar.xz" 76 | 77 | echo '[+] Assembling Qt 5 repack (main sources) ...' 78 | tar xf qt-5.tmp.tar 79 | KDE_QT_COMMIT_DATE="$(cat COMMIT-DATE)" && rm -v COMMIT-DATE 80 | mv -v qt-5.tmp qt-5 81 | 82 | echo '[+] Assembling Qt 5 repack (QtWebKit) ...' 83 | rm -v "qtwebkit-${QTWEBKIT_VERSION}-alpha4"/WebKit.pro 84 | mv -v "qtwebkit-${QTWEBKIT_VERSION}-alpha4" ./qt-5/qtwebkit 85 | 86 | echo '[+] Running syncqt.pl for module headers ...' 87 | cd qt-5 88 | for i in $(find . -maxdepth 1 -type d -name "qt*"); do 89 | cd "$i" 90 | ../qtbase/bin/syncqt.pl -version "${QT_VERSION}" || true 91 | cd .. 92 | done 93 | cd .. 94 | 95 | echo '[+] Compressing final tarball ...' 
96 | tar cf "qt-5-${QT_VERSION}+webengine${QTWEBENGINE_VERSION}+webkit${QTWEBKIT_VERSION}+kde${KDE_QT_COMMIT_DATE}.tar" qt-5 97 | xz -9e -T0 "qt-5-${QT_VERSION}+webengine${QTWEBENGINE_VERSION}+webkit${QTWEBKIT_VERSION}+kde${KDE_QT_COMMIT_DATE}.tar" 98 | 99 | echo '[+] Cleaning up ...' 100 | rm -rf qt-5 qt-5.tmp.tar 101 | 102 | echo '[+] Done!' 103 | -------------------------------------------------------------------------------- /qtsmerge: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 'msgmerge' for Qt Ts. 3 | # Very naive method -- transform to po{,t}, merge and sweep. 4 | trap ' 5 | _ret=("${PIPESTATUS[@]}") IFS=, 6 | printf 'E:\t%s' "Line $LINENO, rets: ${_ret[*]}" >&2 7 | exit "${_ret[0]}" 8 | :' ERR 9 | 10 | if [[ $1 == '--help' ]]; then 11 | cat <<'EOF' 12 | qtsmerge: merges Qt ts files using msgmerge for drunk guys. 13 | The usage is the same as msgmerge, except that the output is always def.po. 14 | (I don't want to parse -o for now) 15 | EOF 16 | exec msgmerge --help 17 | fi 18 | # die early die quick 19 | command -v msgmerge mktemp >/dev/null 20 | 21 | if command -v lconvert >/dev/null; then 22 | ts2po(){ lconvert -if ts -of po -locations relative -o "$2" "$1"; } 23 | po2ts(){ lconvert -if po -of ts -locations absolute -o "$2" "$1"; } 24 | elif command -v po2ts ts2po >/dev/null; then 25 | : 26 | else 27 | false # No converters found! 28 | fi 29 | 30 | tmpdir="$(mktemp -d)" 31 | args=("$@") 32 | 33 | cmd_subst(){ 34 | result=$("$@"; echo x) 35 | result=${result%x} 36 | } 37 | 38 | # wild assumptions on args 39 | pos=() 40 | tss=() 41 | for ts in "${args[-2]}" "${args[-1]}"; do 42 | cmd_subst basename -- "$ts" 43 | pos+=("${tmpdir}/${result%.[tT][sS]}.po") 44 | tss+=("$ts") 45 | ts2po "$ts" "${pos[-1]}" 46 | done 47 | 48 | # 0: po (def), 1: pot (ref) 49 | cp -l "${tss[0]}"{,.bak} 50 | msgmerge -o "${pos[0]}" "${@:1:$#-2}" "${pos[0]}" "${pos[1]}" 51 | 52 | po2ts "${pos[0]}" "${tss[0]}" 53 | -------------------------------------------------------------------------------- /repo-manifest-helper/README.md: -------------------------------------------------------------------------------- 1 | # repo-manifest-helper 2 | 3 | A companion script for repo-manifest. 4 | 5 | ## Usage 6 | 7 | `python3 main.py mirrors.yml` 8 | 9 | You can download the `mirrors.yml` from https://github.com/AOSC-Dev/aosc-portal-kiss.github.io/raw/master/data/mirrors.yml. 10 | -------------------------------------------------------------------------------- /repo-manifest-helper/main.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import logging 3 | import requests 4 | import os 5 | import threading 6 | import sys 7 | 8 | from typing import Optional 9 | 10 | 11 | def load_config(config: str) -> dict: 12 | with open(config, 'rt') as f: 13 | return yaml.safe_load(f) 14 | 15 | 16 | def test_mirror(mirror: dict, results: list): 17 | logging.info('Testing mirror %s...'
% mirror['name']) 18 | try: 19 | requests.get(os.path.join( 20 | mirror['url'], 'aosc-os'), timeout=10).raise_for_status() 21 | results.append(mirror) 22 | except Exception: 23 | return None 24 | 25 | 26 | def test_mirrors(config: dict) -> list: 27 | valid_mirrors = [] 28 | for i in config['mirrors']: 29 | test_mirror(i, valid_mirrors) 30 | return valid_mirrors 31 | 32 | 33 | def guess_mirror_slug(mirror: dict) -> Optional[str]: 34 | def find_char(i): 35 | name_lower = name.lower() 36 | for j in names[i]: 37 | if name_lower.find(j.lower()) < 0: 38 | return False 39 | return True 40 | name: str = mirror['name'] 41 | hostname = mirror['url'].split('://', 1)[1] 42 | hostname = hostname.split('/', 1)[0] 43 | names = hostname.split('.') 44 | slug = None 45 | for i in range(1, len(names) - 1): 46 | if find_char(i): 47 | slug = names[i] 48 | break 49 | return slug 50 | 51 | 52 | if __name__ == "__main__": 53 | logging.getLogger().setLevel(logging.INFO) 54 | candidates = test_mirrors(load_config(sys.argv[1])) 55 | for i in candidates: 56 | slug = guess_mirror_slug(i) 57 | if not slug: 58 | logging.warning("Unable to guess the slug for %s" % i['name']) 59 | print("[[mirrors]]\nname = \"%s\"\nname-tr = \"%s\"\nurl = \"%s\"\nloc = \"%s\"\nloc-tr = \"%s\"\n" % ( 60 | i['name'], slug + '-name' if slug else '', i['url'] + 'aosc-os/', i['region'], slug + 61 | '-loc' if slug else '' 62 | )) 63 | -------------------------------------------------------------------------------- /repo-refresh/repo-refresh.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Check for package updates 3 | ConditionACPower=true 4 | 5 | [Service] 6 | Type=oneshot 7 | ExecStart=/usr/libexec/repo-refresh.sh 8 | -------------------------------------------------------------------------------- /repo-refresh/repo-refresh.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if command -v oma > /dev/null; then 4 | oma refresh 5 | else 6 | apt update 7 | fi 8 | -------------------------------------------------------------------------------- /repo-refresh/repo-refresh.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Check for package updates 3 | 4 | [Timer] 5 | OnCalendar=*-*-* 6,18:00 6 | RandomizedDelaySec=12h 7 | AccuracySec=1h 8 | Persistent=true 9 | 10 | [Install] 11 | WantedBy=timers.target 12 | -------------------------------------------------------------------------------- /repository-notifier/README.md: -------------------------------------------------------------------------------- 1 | Deploying 2 | --------- 3 | 4 | ### Create A Bot 5 | Contact @BotFather to create a new bot; it will also issue a token for accessing the bot. 6 | 7 | ### Find the ZeroMQ Interface Address 8 | The address can be found in the configuration file of p-vector (or other compatible software). 9 | It looks like `tcp://repo.aosc.io:xxxxx`. 10 | 11 | ### Launch the Bot 12 | Install requirements: 13 | ``` 14 | pip3 install --user pyzmq aiogram 15 | ``` 16 | 17 | Start the bot: 18 | ``` 19 | TELEGRAM_TOKEN=12345:aB_cdEf_gfoobar python3 telegram.py tcp://repo.aosc.io:xxxxx 20 | ``` 21 | 22 | ### Set the Chat IDs to Notify 23 | 1. Add the bot to your channels or groups (referred to as "chats" in Bot API terms). 24 | 2. Send a /start@... (bot's username) message. 25 | 3. Check the "chat_id.lst" file in the current working directory; you will find the ID of the chat. 26 | 4.
Copy and paste the ID into the "notify_chat_id.lst" file; if it does not exist, create one. 27 | 28 | >> IDs of users are positive numbers; IDs of channels and groups are negative numbers. 29 | 30 | Press Ctrl+C to stop the bot, then restart it to apply the configuration. 31 | 32 | ### Additional Information 33 | 34 | These debugging commands may be useful. 35 | 36 | - `/start` 37 | - `/stop` 38 | - `/ping` 39 | 40 | The general method to get the chat ID (instead of looking for "chat_id.lst") is as follows: 41 | 42 | 1. Browse the URL https://api.telegram.org/botYOUR_BOT_TOKEN/getUpdates 43 | and you will see events the bot recently received. 44 | 2. Send some messages to the bot, refresh the "getUpdates" page mentioned 45 | in the previous step, and check if there is a new event. 46 | 3. Now you may add the bot to your chats, and after refreshing the "getUpdates" 47 | page you will see the IDs. 48 | 49 | -------------------------------------------------------------------------------- /repository-notifier/telegram.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from aiogram import Bot, types 4 | from aiogram.dispatcher import Dispatcher 5 | from aiogram.utils import executor 6 | from random import randint 7 | from threading import Thread 8 | import asyncio 9 | import itertools 10 | import os 11 | import sys 12 | 13 | if len(sys.argv) == 1: 14 | print('Usage:') 15 | print(' TELEGRAM_TOKEN=xxx:yyyyy python3 %s ZMQ_ADDRESS' % sys.argv[0]) 16 | print('Place chat IDs you want to send notifications to') 17 | print(' in notify_chat_id.lst, one ID per line.') 18 | exit() 19 | 20 | PENDING_DURATION = 30 21 | PENDING_MAX_SIZE = 20 22 | LIST_MAX_SIZE = 30 23 | TOKEN = os.environ['TELEGRAM_TOKEN'] 24 | ZMQ_CHANGES = sys.argv[1] 25 | 26 | bot = Bot(token=TOKEN) 27 | 28 | with open('chat_id.lst', 'r') as f: 29 | chat_ids = set([int(line) for line in f.readlines()]) 30 | 31 | with open('notify_chat_id.lst', 'r') as f: 32 | notify_chat_ids = set([int(line) for line in f.readlines()]) 33 | 34 | dp = Dispatcher(bot) 35 | 36 | @dp.message_handler(commands=['start']) 37 | async def send_welcome(message: types.Message): 38 | chat_id = int(message.chat.id) 39 | if chat_id in chat_ids: 40 | return 41 | chat_ids.add(int(message.chat.id)) 42 | with open('chat_id.lst', 'w') as f: 43 | for i in chat_ids: 44 | f.write(str(i)+'\n') 45 | await message.reply("喵") 46 | 47 | @dp.message_handler(commands=['stop']) 48 | async def send_goodbye(message: types.Message): 49 | chat_id = int(message.chat.id) 50 | if chat_id not in chat_ids: 51 | return 52 | chat_ids.remove(int(message.chat.id)) 53 | with open('chat_id.lst', 'w') as f: 54 | for i in chat_ids: 55 | f.write(str(i)+'\n') 56 | await message.reply("发不出声音了") 57 | 58 | @dp.message_handler(commands=['ping']) 59 | async def send_echo(message: types.Message): 60 | await bot.send_chat_action(message.chat.id, action=types.ChatActions.TYPING) 61 | 62 | import zmq 63 | import zmq.asyncio 64 | ctx = zmq.asyncio.Context.instance() 65 | s = ctx.socket(zmq.SUB) 66 | s.connect(ZMQ_CHANGES) 67 | s.subscribe(b'') 68 | 69 | def classify(pending_list: list): 70 | msg = '' 71 | def get_header(p): 72 | comp = p['comp'] 73 | arch = p['arch'] 74 | return f'{comp} {arch}\n' 75 | pending_list.sort(key=get_header) 76 | for header, g in itertools.groupby(pending_list, key=get_header): 77 | entries = list(g) 78 | msg += header 79 | preferred_order = ['delete', 'new', 'overwrite', 'upgrade'] 80 | entries.sort(key=lambda x:
(preferred_order.index(x['method']), x['pkg'])) 81 | too_long = len(entries) > LIST_MAX_SIZE 82 | for p in entries if not too_long else entries[:LIST_MAX_SIZE]: 83 | pkg = p['pkg'] 84 | to_ver = p['to_ver'] 85 | from_ver = p['from_ver'] 86 | method = p['method'] 87 | if method == 'upgrade': 88 | msg += f' ^ {pkg} {from_ver} → {to_ver}\n' 89 | if method == 'new': 90 | msg += f' + {pkg} {to_ver}\n' 91 | if method == 'delete': 92 | msg += f' - {pkg} {from_ver}\n' 93 | if method == 'overwrite': 94 | msg += f' * {pkg} {from_ver}\n' 95 | if too_long: 96 | remain = len(entries) - LIST_MAX_SIZE 97 | msg += f'and {remain} more...\n' 98 | msg += '\n' 99 | print(msg) 100 | return msg[:-1] 101 | 102 | async def co(): 103 | pending_list = [] 104 | while True: 105 | try: 106 | message = await asyncio.wait_for(s.recv_json(), timeout=PENDING_DURATION) 107 | print(message) 108 | pending_list.append(message) 109 | if len(pending_list) > PENDING_MAX_SIZE: 110 | raise asyncio.TimeoutError() 111 | except asyncio.TimeoutError: 112 | if len(pending_list) > 0: 113 | print('send', len(pending_list)) 114 | for chat_id in notify_chat_ids: 115 | await bot.send_message(chat_id, classify(pending_list), 116 | parse_mode='HTML', 117 | disable_web_page_preview=True) 118 | pending_list = [] 119 | 120 | asyncio.ensure_future(co(), loop=dp.loop) 121 | 122 | if __name__ == '__main__': 123 | executor.start_polling(dp) 124 | 125 | -------------------------------------------------------------------------------- /speculator/COPYING: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 14 | 15 | -------------------------------------------------------------------------------- /speculator/README.md: -------------------------------------------------------------------------------- 1 | speculator 2 | ========== 3 | 4 | Rewrites your stinky out-dated ACBS `spec` files. 5 | 6 | ``` 7 | Usage: 8 | 9 | speculator [spec file] 10 | ``` 11 | -------------------------------------------------------------------------------- /speculator/speculator: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | _help_message() { 3 | printf "\ 4 | Rewrites your stinky out-dated ACBS \`spec\` files. 5 | 6 | Usage: 7 | 8 | speculator [spec file] 9 | 10 | " 11 | } 12 | 13 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 14 | _help_message 15 | exit 0 16 | fi 17 | 18 | if [ -z "$1" ]; then 19 | echo -e "[!!!] Please specify a spec to reform!\n" 20 | _help_message 21 | exit 1 22 | fi 23 | 24 | if [ ! -f "$1" ]; then 25 | echo -e "[!!!] Please specify a normal file!\n" 26 | _help_message 27 | exit 1 28 | fi 29 | 30 | if grep -q '^SRCTBL=' "$1"; then 31 | echo "Tarball source detected ..." 32 | source "$1" 33 | sed -e 's|^SRCTBL\=\"|SRCS\=\"tbl\:\:|g' \ 34 | -e 's|^CHKSUM=|CHKSUMS=|g' \ 35 | -i "$1" 36 | elif grep -q '^DUMMYSRC=' "$1"; then 37 | echo "Dummy source detected, skipping ..." 38 | elif grep -q '^.*SRC=' "$1"; then 39 | echo "VCS source detected ..."
40 | sed -e 's|\$|\\$|g' -i "$1" 41 | source "$1" 42 | export VCS="$(grep '^.*SRC=' "$1" | awk -F"SRC" '{ print $1 }')" 43 | export VCSCO="${VCS}CO" 44 | export VCSSRC="${VCS}SRC" 45 | printf "\ 46 | SRCS=\"${VCS,,}::commit=${!VCSCO}::${!VCSSRC}\" 47 | CHKSUMS=\"SKIP\" 48 | " >> "$1" 49 | sed -e "/${VCSSRC}/d" \ 50 | -e "/${VCSCO}/d" \ 51 | -i "$1" 52 | elif grep -q '^SRCS.*=' "$1"; then 53 | echo "New spec format detected, skipping ..." 54 | else 55 | echo "Unknown spec format, or non-spec file, skipping ..." 56 | fi 57 | -------------------------------------------------------------------------------- /spiral/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /spiral/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "spiral" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1" 8 | clap = { version = "4", features = ["derive"] } 9 | dirs = "6.0.0" 10 | reqwest = { version = "0.12", features = ["blocking"] } 11 | flate2 = "1.0.35" 12 | bincode = "2" 13 | oma-contents = "0.17.0" 14 | fancy-regex = "0.14.0" 15 | simplelog = "0.12.2" 16 | log = "0.4" 17 | rayon = "1.10.0" 18 | -------------------------------------------------------------------------------- /spiral/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashSet, 3 | fs::File, 4 | io::{BufRead, BufReader, Write}, 5 | path::Path, 6 | }; 7 | 8 | use anyhow::{Context, Result}; 9 | use bincode::config::{self}; 10 | use clap::{ArgAction, Parser, Subcommand}; 11 | use dirs::cache_dir; 12 | use fancy_regex::Regex; 13 | use flate2::read::GzDecoder; 14 | use log::info; 15 | use oma_contents::{ 16 | parser::parse_contents_single_line, 17 | searcher::{search, Mode}, 18 | }; 19 | use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; 20 | use reqwest::blocking::ClientBuilder; 21 | use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode}; 22 | 23 | const USER_AGENT: &str = "Wget/1.20.3 (linux-gnu)"; 24 | const UBUNTU_CODENAME: &[&str] = &["jammy", "noble"]; 25 | 26 | #[derive(Debug, Parser)] 27 | struct App { 28 | #[clap(subcommand)] 29 | subcmd: Subcmd, 30 | } 31 | 32 | #[derive(Debug, Subcommand)] 33 | enum Subcmd { 34 | /// Query aosc package name from debian package name 35 | Query { 36 | /// Package name 37 | #[arg(required = true, action = ArgAction::Append)] 38 | names: Vec<String>, 39 | }, 40 | /// Update cache 41 | UpdateCache, 42 | } 43 | 44 | fn main() -> Result<()> { 45 | let app = App::parse(); 46 | 47 | TermLogger::init( 48 | LevelFilter::Info, 49 | Config::default(), 50 | TerminalMode::Stderr, 51 | ColorChoice::Auto, 52 | )?; 53 | 54 | let dir = cache_dir().context("Failed to get cache dir")?; 55 | 56 | let spiral_data_path = dir.join("spiral_data"); 57 | 58 | match app.subcmd { 59 | Subcmd::Query { names } => { 60 | let data = if !spiral_data_path.exists() { 61 | update_data(&dir)? 62 | } else { 63 | let f = File::open(spiral_data_path)?; 64 | let reader = BufReader::new(f); 65 | bincode::decode_from_reader(reader, config::standard())?
66 | }; 67 | 68 | let mut set = HashSet::new(); 69 | 70 | let res = data 71 | .iter() 72 | .filter(|x| names.contains(&x.1)) 73 | .flat_map(|x| Path::new(&x.0).file_name().map(|x| x.to_string_lossy())); 74 | 75 | for i in res { 76 | let pkg = get_aosc_package_name(&i)?; 77 | for p in pkg { 78 | set.insert(p); 79 | } 80 | } 81 | 82 | for i in &set { 83 | print!("{} ", i); 84 | } 85 | 86 | if !set.is_empty() { 87 | println!(); 88 | } 89 | } 90 | Subcmd::UpdateCache => { 91 | update_data(&dir)?; 92 | } 93 | } 94 | 95 | Ok(()) 96 | } 97 | 98 | fn get_aosc_package_name(so_file: &str) -> Result<Vec<String>> { 99 | let mut v = vec![]; 100 | 101 | let input = format!("/usr/lib/{}", so_file); 102 | 103 | info!("Searching {input}"); 104 | 105 | search( 106 | "/var/lib/apt/lists", 107 | Mode::Provides, 108 | &input, 109 | |(pkg, file)| { 110 | if file == input { 111 | v.push(pkg); 112 | } 113 | }, 114 | ) 115 | .ok(); 116 | 117 | Ok(v) 118 | } 119 | 120 | fn update_data(dir: &Path) -> Result<Vec<(String, String)>> { 121 | info!("Updating spiral cache"); 122 | 123 | let re = Regex::new( 124 | r"/?usr/lib/(?:x86_64-linux-gnu/)?(?P<so>lib[a-zA-Z0-9\-._+]+\.so(?:\.[0-9]+)*)", 125 | )?; 126 | let client = ClientBuilder::new().user_agent(USER_AGENT).build()?; 127 | 128 | let res = UBUNTU_CODENAME 129 | .par_iter() 130 | .flat_map(|i| -> Result<Vec<(String, String)>> { 131 | let mut res = vec![]; 132 | 133 | let resp = client 134 | .get(format!( 135 | "http://archive.ubuntu.com/ubuntu/dists/{}/Contents-amd64.gz", 136 | i 137 | )) 138 | .send()? 139 | .error_for_status()?; 140 | 141 | let reader = BufReader::new(GzDecoder::new(resp)); 142 | 143 | for i in reader.lines() { 144 | let i = i?; 145 | let (file, pkgs) = parse_contents_single_line(&i)?; 146 | if re.is_match(file)? { 147 | for p in pkgs { 148 | res.push(( 149 | file.trim().to_string(), 150 | p.split('/') 151 | .last() 152 | .context("Failed to parse contents line")? 153 | .to_string(), 154 | )); 155 | } 156 | } 157 | } 158 | 159 | Ok(res) 160 | }) 161 | .flatten() 162 | .collect::<Vec<_>>(); 163 | 164 | let mut f = File::create(dir.join("spiral_data"))?; 165 | let dst = bincode::encode_to_vec(&res, config::standard())?; 166 | f.write_all(&dst)?; 167 | 168 | info!("Updated spiral cache"); 169 | 170 | Ok(res) 171 | } 172 | -------------------------------------------------------------------------------- /st: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ $# != 1 ]]; then 4 | echo "Usage: st [NAME]" 5 | exit 1 6 | fi 7 | 8 | tmux a -t "$1" 9 | 10 | if [ $? = 1 ]; then 11 | tmux new -s "$1" 12 | fi 13 | -------------------------------------------------------------------------------- /suicide.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # suicide.sh: Running this is quite like ...... 3 | # FOR REFERENCE USE ONLY, NOT A LINTER 4 | 5 | _grep(){ grep --color=auto -EHnr "$@"; } 6 | 7 | # Dunno how to exclude stuffs like `string w' and `something else'. 8 | # I said reference only.
9 | _grep '`[[:alnum:]_]* .*`' * 10 | _grep -A1 '(el(se|if)|then)' * | grep --color=auto -EB1 'true' # [:-]\s*(true|:|false)' 11 | _grep 'echo\s+$[[:alnum:]_]*\s*\|[^|]' * 12 | -------------------------------------------------------------------------------- /sz: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ $# != 1 ]]; then 4 | echo "Usage: sz [NAME]" 5 | exit 1 6 | fi 7 | 8 | zellij a -c "$1" 9 | -------------------------------------------------------------------------------- /translations/.gitignore: -------------------------------------------------------------------------------- 1 | /venv 2 | /.mypy_cache 3 | -------------------------------------------------------------------------------- /translations/refresh-tp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import requests 3 | import os 4 | import sys 5 | import re 6 | import html5lib 7 | import logging 8 | import semver 9 | import subprocess 10 | 11 | from typing import List, Dict 12 | 13 | # $1: package name $2: version string 14 | matching_patt = r'(.*?)-(\d.*?)\.zh_CN' 15 | po_dl_url = 'https://translationproject.org/PO-files/{lang}/{fn}' 16 | po_name = '{pkg}-{ver}.{lang}.po' 17 | domain_url = 'https://translationproject.org/domain/index.html' 18 | 19 | 20 | def collect_local_info(dirname: str): 21 | files = [] 22 | for f in os.listdir(dirname): 23 | if not os.path.isfile(os.path.join(dirname, f)): 24 | continue 25 | if not f.endswith('.po'): 26 | continue 27 | matched = re.search(matching_patt, f) 28 | if not matched: 29 | continue 30 | domain = matched.groups() 31 | if len(domain) != 2: 32 | continue 33 | files.append(tuple(domain)) 34 | return files 35 | 36 | 37 | def collect_remote_info() -> Dict[str, str]: 38 | parser = html5lib.HTMLParser(tree=html5lib.getTreeBuilder("dom")) 39 | domain_data = requests.get(domain_url) 40 | parsed = parser.parse(domain_data.text) 41 | nodes = parsed.getElementsByTagName('tbody')[0] 42 | nodes = nodes.childNodes 43 | head = True 44 | remote_data = {} 45 | for node in nodes: 46 | if node.nodeType == 3: 47 | continue 48 | if head: 49 | head = False 50 | continue 51 | # node -> <td> -> <a> -> text node 52 | pkg_name = node.childNodes[1].childNodes[0].childNodes[0].nodeValue 53 | pkg_ver = node.childNodes[3].childNodes[0].childNodes[0].nodeValue 54 | remote_data[pkg_name] = pkg_ver 55 | return remote_data 56 | 57 | 58 | def download_po(pkg, ver, lang, folder='.'): 59 | po_file = po_name.format(pkg=pkg, ver=ver, lang=lang) 60 | po_url = po_dl_url.format(lang=lang, fn=po_file) 61 | logging.warning('Downloading %s...'
% po_file) 62 | resp = requests.get(po_url) 63 | if resp.status_code not in range(200, 300): 64 | logging.error('Download error: %s' % resp.status_code) 65 | return 66 | with open(os.path.join(folder, po_file), 'wt') as f: 67 | f.write(resp.text) 68 | 69 | 70 | def main(): 71 | if len(sys.argv) < 2: 72 | print('%s <po directory>' % sys.argv[0]) 73 | sys.exit(1) 74 | logging.warning('Scanning files...') 75 | local = collect_local_info(sys.argv[1]) 76 | logging.warning('Fetching remote data...') 77 | remote = collect_remote_info() 78 | for f in local: 79 | remote_ver = remote.get(f[0]) 80 | if not remote_ver: 81 | logging.error('Local file %s not found in remote data' % f[0]) 82 | continue 83 | if f[1] == remote_ver: 84 | continue 85 | try: 86 | if semver.compare(f[1], remote_ver) >= 0: 87 | logging.info('Local file %s is up to date' % f[0]) 88 | continue 89 | except ValueError: 90 | pass 91 | download_po(f[0], remote_ver, 'zh_CN', sys.argv[1]) 92 | po_file = po_name.format(pkg=f[0], ver=f[1], lang='zh_CN') 93 | po_file = os.path.join(sys.argv[1], po_file) 94 | pot_file = po_name.format(pkg=f[0], ver=remote_ver, lang='zh_CN') 95 | pot_file = os.path.join(sys.argv[1], pot_file) 96 | if not subprocess.call(['msgmerge', po_file, pot_file, '-o', pot_file]): 97 | os.remove(po_file) 98 | 99 | 100 | if __name__ == '__main__': 101 | main() 102 | -------------------------------------------------------------------------------- /translations/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | semver 3 | html5lib 4 | -------------------------------------------------------------------------------- /univt-fonts/convert_univt.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import sys 5 | import warnings 6 | 7 | import bdflib 8 | 9 | def convert_bdf(bdffont): 10 | for i in range(0x10000): 11 | if i in bdffont.glyphs_by_codepoint: 12 | glyph = bdffont.glyphs_by_codepoint[i] 13 | data = glyph.data.copy() 14 | data.reverse() 15 | if glyph.bbW > 16: 16 | warnings.warn("glyph U+%04x width %d > 16, dropped" % (i, glyph.bbW)) 17 | yield [0]*32 18 | else: 19 | expanded = [row<<(16-glyph.bbW) for row in data] 20 | yield [row>>8 for row in expanded] + [row&0xff for row in expanded] 21 | else: 22 | yield [0]*32 23 | 24 | def format_header(glyphs): 25 | yield 'static unsigned char font_utf8[2097152] = {' 26 | for k, row in enumerate(glyphs): 27 | if k < 0x20 or 0xD800 <= k <= 0xDFFF or k == 0xFFFF: 28 | yield '// %d ;' % k 29 | else: 30 | yield '// %d %s ;' % (k, chr(k)) 31 | yield (',' if k else '') + ','.join('0x%02x' % x for x in row) 32 | yield '};' 33 | 34 | if __name__ == '__main__': 35 | # python3 convert_univt.py unifont-*.bdf fonts_utf8.h 36 | bdffont = bdflib.read_bdf(open(sys.argv[1])) 37 | with open(sys.argv[2], 'w') as f: 38 | for line in format_header(convert_bdf(bdffont)): 39 | f.write(line + '\n') 40 | -------------------------------------------------------------------------------- /univt-fonts/extract_univt.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import sys 5 | import bdflib 6 | 7 | def read_font_header(filename): 8 | with open(filename, 'r') as f: 9 | f.readline() 10 | for ln in f: 11 | if ln.startswith('//') or ln.startswith('}'): 12 | pass 13 | else: 14 | g = [int(x, 0) for x in ln.strip().strip(',').split(',')] 15 | high = g[:16] 16 | low =
g[16:] 17 | if any(low): 18 | w = 16 19 | g1 = ['%04X' % (h<<8 | l) for h, l in zip(high, low)] 20 | else: 21 | w = 8 22 | g1 = ['%02X' % x for x in high] 23 | yield g1, w 24 | 25 | def generate_bdf(fontdata): 26 | bdffont = bdflib.Font('univt', 16, 75, 75) 27 | for k, (g, w) in enumerate(fontdata): 28 | bdffont.new_glyph_from_data('U+%04X' % k, g, 0, -2, w, 16, 16, k) 29 | return bdffont 30 | 31 | if __name__ == '__main__': 32 | # python3 extract_univt.py fonts_utf8.h fonts_utf8.bdf 33 | bdffont = generate_bdf(read_font_header(sys.argv[1])) 34 | with open(sys.argv[2], 'w') as f: 35 | bdflib.write_bdf(bdffont, f) 36 | -------------------------------------------------------------------------------- /webrtc-repack/README.md: -------------------------------------------------------------------------------- 1 | # WebRTC Source Packer 2 | 3 | This small scriptlet helps you generate a tar archive of the WebRTC source tree, pinned to the WebRTC revision used by the current stable Chromium. 4 | 5 | ## Requirements 6 | 7 | - curl jq perl git python3 tar xz rsync 8 | - A very fast and stable network connection is required 9 | - At least 10 GB of disk space 10 | 11 | ## Usage 12 | 13 | Just run `./make-repack.sh` and wait. 14 | -------------------------------------------------------------------------------- /webrtc-repack/make-repack.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | for i in curl jq perl git python3 tar xz rsync; do 3 | if ! command -v "$i" > /dev/null; then 4 | echo "[!] Please install $i!" 5 | exit 1 6 | fi 7 | done 8 | 9 | echo '[-] Fetching Chromium (stable) version information ...' 10 | VERSION="$(curl -sL 'https://omahaproxy.appspot.com/all.json?channel=stable&os=linux' | jq --raw-output '.[0].versions[0].version' -)" 11 | echo "[-] Current version seems to be $VERSION" 12 | echo '[-] Fetching components information ...' 13 | DEPS="$(curl -sL "https://chromium.googlesource.com/chromium/src/+/$VERSION/DEPS?format=TEXT" | base64 -d)" 14 | echo '[-] Finding WebRTC commit information ...' 15 | OWT_COMMIT="$(echo "$DEPS" | perl -ne "/Var\('webrtc_git'\).+?'([0-9a-f]{40})'/ && print \"\$1\"")" 16 | [ -z "$OWT_COMMIT" ] && exit 1 17 | echo "[-] WebRTC commit to use: $OWT_COMMIT" 18 | echo "[+] Making a tmp directory ..." 19 | TMPDIR="$(mktemp -d)" 20 | pushd "$TMPDIR" 21 | echo "[+] Downloading depot tools ..." 22 | git clone --depth 1 https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools 23 | export PATH="$PATH:$(pwd)/depot_tools" 24 | export DEPOT_TOOLS_UPDATE=0 25 | cat << EOF > .gclient 26 | solutions = [ 27 | { 28 | "managed": False, 29 | "name": "src", 30 | "url": "https://webrtc.googlesource.com/src.git", 31 | "custom_deps": {}, 32 | "deps_file": "DEPS" 33 | }, 34 | ] 35 | target_os = [] 36 | EOF 37 | echo '[+] Downloading source trees using gclient, please wait patiently ...' 38 | gclient sync --rev "$OWT_COMMIT" --no-history -n 39 | echo '[+] Packing tarball ...' 40 | TARBALL="webrtc-${OWT_COMMIT:0:7}.tar" 41 | tar cf "$TARBALL" src 42 | xz -T0 "$TARBALL" 43 | popd 44 | rsync "$TMPDIR/$TARBALL.xz" . 45 | echo "[-] Removing the directory ..." 46 | rm -rf "$TMPDIR" 47 | echo "Done. Your tarball is ready: $(readlink -f "$TARBALL.xz")" 48 | -------------------------------------------------------------------------------- /whatlib.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # certainly useless stuffs 3 | 4 | # Prototype!
5 | _ancient_getlongopts(){ 6 | declare -A OPTLONG 7 | OPTLONG[verbose]=v 8 | OPTLONG[quiet]=q 9 | OPTLONG[help]=H 10 | OPTLONG[wait]=W: 11 | OPTLONG['--']=-- 12 | local OPTIONS=("$@") 13 | declare -p OPTIONS 14 | while getopts 'vqhHW:-:' OPT; do 15 | case "$OPT" in 16 | (-) _ancient_longopt_handler;; 17 | (*) _opt_handler_$OPT "$OPTARG";; 18 | esac 19 | done 20 | } 21 | _ancient_longopt_handler(){ 22 | local OPTEXPD="${OPTLONG[$OPTARG]}" 23 | case "$OPTEXPD" in 24 | (--) echo "Unrecognized longopt $OPTARG" >&2; return 1;; 25 | (*::) echo "Unsupported argument count $OPTARG">&2; return 1;; 26 | (*:) ((++OPTIND)); _opt_handler_${OPTEXPD::-1} "${OPTIONS[$OPTIND]}";; 27 | (*) _opt_handler_$OPTEXPD;; 28 | esac 29 | } 30 | 31 | # shsplit "str" -> _shsplit_out[] 32 | # shlex.split()-like stuff 33 | # what, implementing that 'disregard quotes mid-token'? No.\ 34 | # robustness note: many end-quote replacements should test for the existence of the pattern first, 35 | # and return 42 if patt not present. 36 | # fixme: backslash even-odd not checked in patterns! this is fatal. 37 | # You will need to have an extra var to hold ${tmp##[!\\]} and count. 38 | shsplit(){ 39 | _shsplit_out=() 40 | shopt -s extglob 41 | local _shsplit_ksh_cquote=1 _shsplit_bash_moquote=1 42 | local i="$1" thisword='' tmp='' dquote_ret 43 | # debug tip: set -xv freaks out for `[['. 44 | while [[ $i ]]; do 45 | case $i in 46 | "'"*) # single quote, posix.1:2013v3c2s2.2.2 47 | i=${i#\'} 48 | # use till first "'" 49 | tmp=${i%%\'*} 50 | i=${i#"$tmp"\'} 51 | thisword+=$tmp 52 | ;; 53 | "\""*) # double quote, posix.1:2013v3c2s2.2.2 54 | _shsplit_dquote 55 | thisword+=$dquote_ret 56 | ;; 57 | "$'"*) # bash s3.1.2.4 58 | i=${i#'$'} 59 | if ((_shsplit_ksh_cquote)); then 60 | i=${i#\'} 61 | # dquote & norm magic 62 | tmp=${i%%!(!(\\)\\)\'*} 63 | i=${i#"$tmp"} 64 | tmp=${i:0:2} 65 | i=${i:3} 66 | # I am too lazy to play with you guys. Go get it, eval. 67 | eval "thisword+=$'$tmp'" 68 | else 69 | thisword+=\$ 70 | fi 71 | ;; 72 | '$"'*) # bash s3.1.2.5 73 | i=${i#'$'} 74 | if ((_shsplit_bash_moquote)); then 75 | _shsplit_dquote 76 | if ((_shsplit_bash_moquote == 2)); then 77 | # re-escape. dirty, right? 78 | # only do this when you fscking trust the input. 79 | # no, I will not escape \$ and \` for you. 80 | dquote_ret=${dquote_ret//\\/\\\\} 81 | dquote_ret=${dquote_ret//\"/\\\"} 82 | eval 'dquote_ret=$"'"$dquote_ret\"" 83 | # elif 3: gettext() .....
84 | fi 85 | thisword+=$dquote_ret 86 | else 87 | thisword+=\$ 88 | fi 89 | ;; 90 | [[:space:]]*) 91 | [[ $thisword ]] && _shsplit_out+=("$thisword") 92 | thisword='' 93 | i=${i##+([[:space:]])} 94 | ;; 95 | *) 96 | _shsplit_eat_till_special 97 | ;; 98 | esac 99 | done 100 | [[ $thisword ]] && _shsplit_out+=("$thisword") 101 | } 102 | 103 | _shsplit_eat_till_special(){ 104 | local thisword2 105 | tmp=${i%%!(\\)[\$\'\"[:space:]]*} # first non-escaped crap 106 | i=${i#"$tmp"} 107 | tmp=${i:0:1} # add back the extra !(\\) char killed 108 | i=${i:1} 109 | _shsplit_soft_backslash 110 | thisword+=$thisword2 111 | } 112 | 113 | _shsplit_dquote(){ 114 | local thisword2 115 | i=${i#\"} 116 | tmp=${i%%!(!(\\)\\)\"*} # first non-escaped " 117 | i=${i#"$tmp"} 118 | tmp=${i:0:2} # add back the extra !(!(\\)\\) chars killed 119 | i=${i:3} # kill three -- including " 120 | _shsplit_soft_backslash 121 | dquote_ret=$thisword2 122 | } 123 | 124 | _shsplit_soft_backslash(){ 125 | local tmp2 126 | while [[ $tmp ]]; do 127 | case $tmp in 128 | '\\'*) 129 | tmp=${tmp#'\\'} 130 | thisword2+='\' 131 | ;; 132 | '\'$'\n'*) 133 | tmp=${tmp#'\'$'\n'} 134 | ;; 135 | '\'*) # means nothing 136 | tmp=${tmp#'\'} 137 | ;& # fallthru 138 | *) 139 | tmp2=${tmp%%\\*} 140 | tmp=${tmp#"$tmp2"} 141 | thisword2+=$tmp2 142 | ;; 143 | esac 144 | done 145 | } 146 | -------------------------------------------------------------------------------- /zhconv-merge.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # zhconv-merge.sh: Merge zh variant translations with OpenCC and msgmerge. 3 | 4 | usage="Usage: $0 OLD_FILE MERGE_ME_IN [POT_FILE=MERGE_ME_IN] 5 | if OLD_FILE is missing, assume creation of new file. 6 | 7 | Env vars: 8 | ZH_MSGMERGE_OPTS \`linear' array of extra flags for \`msgmerge'. 9 | Example: '-E -C \"my compendia.po\" -w 79 --previous' 10 | Default: '--previous' 11 | 12 | ZH_POST_OCC:function What to do after invoking OpenCC. To use, 13 | define a function with this name in bash, 14 | and export it with \`export -f ZH_POST_OCC'. 15 | Example: ZH_POST_OCC() { msgattrib --set-fuzzy \\ 16 | --no-fuzzy -o \"\$new.\$oldtype\"{,}; } 17 | " 18 | 19 | # This script comes with ABSOLUTELY NO WARRANTY, and can be used as if it is in 20 | # public domain, or (optionally) under the terms of CC0, WTFPL or Unlicense. 21 | 22 | # Please make sure that there are no Chinese characters in msgid, or bad things 23 | # will happen when opencc converts them by mistake. msgfilter can be used, but 24 | # it's super slow with it being called super many times. Same for sed. 25 | # Also, don't pass non-UTF8 files in. 26 | readonly {FALSE,NO,false,no}=0 {TRUE,YES,true,yes}=1 # boolean shorthands 27 | die(){ echo "Fatal: $1">&2; exit "${2-1}"; } 28 | info(){ echo "Info: $*">&2; } 29 | 30 | [ "$2" -a -e "$2" ] || die "Arguments invalid 31 | 32 | $usage" 33 | 34 | type opencc sed msgmerge >/dev/null || die "required command(s) not found" 35 | 36 | # Accept environment 'linear array' input. 37 | declare -a ZH_MSGMERGE_OPTS="(${ZH_MSGMERGE_OPTS:---previous})" 38 | 39 | type ZH_POST_OCC &>/dev/null || ZH_POST_OCC(){ :; } 40 | 41 | # OpenCC example cfgs used, all with Phrase Variants: 42 | # s2twp: CN -> TW 43 | # tw2sp: TW -> CN 44 | # s2hk: CN -> HK 45 | # hk2s: HK -> CN 46 | declare -A cn_t_word 47 | cn_t_word[ 函式 ]=函数 48 | cn_t_word[ 封存 ]=归档 49 | # Extra sed commands for conversion.
50 | to_cn_sed=( 51 | -r # ERE for grouping 52 | -e 's/函式/函数/g' # function 53 | -e 's/封存/归档/g' # archive 54 | -e 's/开启/打开/g' # open 55 | -e 's/命令稿/脚本/g' # script 56 | -e 's/盘案/文件/g' # file (save) 57 | -e 's/回传/返回/g' # return (function) 58 | -e 's/引数/参数/g' # argument (function) 59 | -e 's/签章/签名/g' # signature (PGP) 60 | -e 's/巨集/宏/g' # macro 61 | -e 's/魔术字符/幻数/g' # magic number 62 | -e 's/唯读/只读/g' # readonly 63 | -e 's/胚腾/模式/g' # pattern, un-standardly translated to 胚腾 in TW sometimes. 64 | -e 's/逾時/超时/g' # timed out 65 | -e 's/相依性/依赖关系/g' -e 's/相依/依赖/g' # dependency (pkgmgr) 66 | -e 's/万用匹配/通配符/g' -e 's/万用字符/通配符/g' # glob 67 | -e 's/([二八十]|十六)进位制?/\1进制/g' # bin, oct, dec, hex.. 68 | # -e 's/修补/补丁/g' # patch 69 | # -e 's/套件/软件包/g' # package 70 | # -e 's/异动/事务/'g # transaction 71 | -e 's/不容许/不允许/g' # not permitted 72 | -e 's/暂存盘/临时文件/g' # tmpfile, word_struct (暂存 盘) 73 | # -e 's/缩减/归约/g' # reduce (parser) 74 | -e 's/算子/算符/g' # operator (parser) 75 | -e 's/全域/全局/g' # global 76 | -e 's/做为/作为/g' # foo as(作为) bar 77 | -e 's/行程/进程/g' # process 78 | -e 's/润算/渲染/g' # render 79 | -e 's/堆栈/堆叠/g' # stack 80 | -e 's/指标/指针/g' # pointer 81 | -e 's/印出/打印/g' # print 82 | # -e 's/行/__CoLM_列__/g' -e 's/列/行/g' -e 's/__CoLM_列__/列/g' # different ideas on lines and cols 83 | -e 's/「/“/g' -e 's/」/”/g' -e 's/『/‘/g' -e 's/』/’/g' # crude quoting 84 | ) 85 | 86 | from_cn_sed=( 87 | -e 's/函数/函式/g' # function 88 | -e 's/归档/封存/g' # archive 89 | -e 's/宏/巨集/g' # macro 90 | -e 's/只读/唯读/g' # readonly 91 | -e 's/全局/全域/g' # global 92 | ) 93 | 94 | zhvar(){ 95 | case "$1" in 96 | (*zh[_-]CN*|*zh[_-]Hans*) 97 | echo "CN";; 98 | (*zh[_-]TW*|*zh[_-]Hant*) 99 | echo "TW";; 100 | (*zh[_-]HK*) echo "HK";; 101 | (*) echo "??" ;; 102 | esac 103 | } 104 | 105 | occ_conv(){ 106 | local occcfg 107 | case "$1,$2" in 108 | (CN,TW) occcfg=s2twp;; 109 | (TW,CN) occcfg=tw2sp;; 110 | (CN,HK) occcfg=s2hk;; 111 | (HK,CN) occcfg=hk2s;; 112 | (TW,HK) occcfg=tw2sp,s2hk;; 113 | (HK,TW) occcfg=hk2s,s2twp;; 114 | (CN,CN|HK,HK|TW,TW) 115 | occcfg=NULL;; 116 | esac 117 | if [ -z "$occcfg" ]; then 118 | die "Specified pair $1,$2 not supported. Add it yourself." 119 | fi 120 | do_occ "$occcfg" "$3" "$4" 121 | } 122 | 123 | do_occ(){ 124 | local curr IFS=, 125 | cp "$2" "${3:-$2}.work" || return 126 | for curr in $1; do 127 | [ "$curr" != NULL ] || continue 128 | opencc -c "$curr" -i "${3:-$2}.work" -o "${3:-$2}.work" || return 129 | done 130 | mv "${3:-$2}"{.work,} 131 | } 132 | 133 | old="$1" oldtype="$(zhvar "$old")" 134 | new="$2" newtype="$(zhvar "$new")" 135 | pot="${3:-$2}" 136 | 137 | if [ ! -e "$old" ]; then 138 | info "Creating $old." 139 | :> "$old" 140 | fi 141 | 142 | echo " 143 | OLD $oldtype $old 144 | NEW $newtype $new 145 | POT -- $pot 146 | " 147 | 148 | case "$newtype" in 149 | (CN) sed "${from_cn_sed[@]}" "$new" > "$new.$oldtype";; 150 | (*) cp "$new" "$new.$oldtype" 151 | esac 152 | 153 | occ_conv "$newtype" "$oldtype" "$new"{,".$oldtype"} || 154 | die "opencc returned $?." 155 | 156 | ZH_POST_OCC 157 | 158 | cp "$old"{,'~'} 159 | msgattrib --translated -o "$old"{,} 160 | msgcat -o "$old.all" --use-first "$old" "$new.$oldtype" 161 | 162 | msgmerge --lang="zh_$oldtype" "${ZH_MSGMERGE_OPTS[@]}" -o "$old"{,.all} "$pot" || 163 | die "msgmerge returned $?." 
164 | 165 | case "$oldtype" in 166 | (CN) sed -i.pre_final "${to_cn_sed[@]}" "$old" 167 | OUTFILES+="SED $oldtype $old.pre_final"$'\n' 168 | esac 169 | 170 | echo " 171 | OUT $oldtype $old 172 | ALL $oldtype $old.all 173 | TMP $oldtype $new.$oldtype 174 | $OUTFILES 175 | Verify the results in a po editor, with some basic knowledge in zh_$oldtype." 176 | --------------------------------------------------------------------------------
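An illustrative invocation of zhconv-merge.sh, following its usage text above; the file names (`app.zh_CN.po`, `app.zh_TW.po`, `app.pot`) are hypothetical, not from the repository:

```bash
# Merge a zh_TW PO file into an existing zh_CN PO file. The zh_TW input is
# first converted with OpenCC (tw2sp), merged against the POT template with
# msgmerge, and then run through the to_cn_sed terminology fixups.
ZH_MSGMERGE_OPTS='--previous -w 79' \
    ./zhconv-merge.sh app.zh_CN.po app.zh_TW.po app.pot
```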