├── aurbs ├── __init__.py ├── ui │ ├── __init__.py │ ├── .gitignore │ ├── static │ │ ├── bootstrap │ │ │ ├── img │ │ │ │ ├── glyphicons-halflings.png │ │ │ │ └── glyphicons-halflings-white.png │ │ │ ├── css │ │ │ │ └── bootstrap-responsive.min.css │ │ │ └── js │ │ │ │ └── bootstrap.min.js │ │ ├── js │ │ │ └── ansispan.js │ │ └── style.css │ ├── templates │ │ ├── package_list.html │ │ ├── problems.html │ │ ├── package_log.html │ │ ├── status.html │ │ ├── package_view.html │ │ └── base.html │ └── application.py ├── model.py ├── dummy.py ├── config.py ├── remotedb.py ├── static.py ├── webserver.py ├── helper.py ├── subproc.py ├── aur.py ├── aurinfo.py ├── pkg_parser.py └── db.py ├── .gitignore ├── templates ├── gpg.conf ├── aurbs.yml ├── lighttpd.conf.sample ├── pacman.conf.in └── makepkg.conf.in ├── Makefile ├── bin ├── aurbs.fcgi └── aurbs ├── setup.py ├── README.md ├── contrib └── tree.py └── init.py /aurbs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aurbs/ui/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aurbs/ui/.gitignore: -------------------------------------------------------------------------------- 1 | secret.key 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | .*.swp 4 | db 5 | cache 6 | build_db 7 | build 8 | -------------------------------------------------------------------------------- /templates/gpg.conf: -------------------------------------------------------------------------------- 1 | require-cross-certification 2 | keyserver hkp://keys.gnupg.net 3 | keyserver-options auto-key-retrieve 4 | 
#!/usr/bin/python
# FastCGI entry point: configures the Flask app for deployment behind
# lighttpd (see templates/lighttpd.conf.sample) and serves it via flup.

import os

from flup.server.fcgi import WSGIServer
#from wsgigzip import GzipMiddleware
# NOTE(review): werkzeug.contrib was removed in Werkzeug 1.0, so this
# import fails on modern Werkzeug — needs a replacement root fix.
from werkzeug.contrib.fixers import LighttpdCGIRootFix
from aurbs.ui.application import app

if __name__ == '__main__':
	# NOTE(review): debug mode in a deployed fcgi exposes tracebacks —
	# confirm this is intentional for production.
	app.debug = True
	# let lighttpd deliver files itself via the X-Sendfile header
	# ("allow-x-send-file" is enabled in the sample lighttpd config)
	app.use_x_sendfile = True
	# serve templates/static from the installed location (see Makefile)
	ui_folder = '/usr/share/aurbs/ui'
	app.static_folder = os.path.join(ui_folder, 'static')
	app.template_folder = os.path.join(ui_folder, 'templates')
	#app = GzipMiddleware(app)
	app = LighttpdCGIRootFix(app)
	WSGIServer(app).run()
class Singleton(object):
	"""Class decorator: the first call constructs the instance; every
	later call returns that same object (later constructor arguments
	are ignored)."""
	def __init__(self, cls):
		self.cls = cls
		self.instance = None

	def __call__(self, *args, **kwargs):
		if self.instance is not None:
			return self.instance
		self.instance = self.cls(*args, **kwargs)
		return self.instance

class Dependency(object):
	"""Status constants for a build dependency."""
	# old build is available
	ok = 0
	# new build is available
	rebuilt = 1
	# something went wrong (result=blocked or failed)
	blocked = 2

class FatalError(Exception):
	"""Unrecoverable error — abort the current operation."""
	pass

class PKGSyncNeeded(Exception):
	"""Signal that a package sync is required before proceeding."""
	pass
@Singleton
class AurBSConfig(object):
	"""
	Singleton wrapper around the YAML config file.

	The first instantiation must pass the config file path; later calls
	return the same instance (see Singleton). Config values are exposed
	both as attributes and via item access.
	"""
	def __init__(self, configfile):
		self.configfile = configfile
		# use a context manager so the file handle is closed
		# (the original leaked the open file object)
		with open(configfile, 'r') as f:
			ctxt = f.read()
		# who said, I couldn't use tabs in yaml?
		ctxt = ctxt.replace("\t", " ")
		# NOTE(review): a full yaml load can construct arbitrary Python
		# objects; the config is admin-provided here, but consider
		# yaml.safe_load for defense in depth.
		self.config = load(ctxt, Loader=Loader)

	def __getattr__(self, key):
		# unknown attributes fall through to the config dict
		# (missing keys raise KeyError, not AttributeError)
		return self.config[key]

	def __getitem__(self, key):
		return self.config[key]
NameVersionVotesDescriptionArch
{{ pkg.name }}{{ pkg.version }}{{ pkg.votes }}{{ pkg.description|truncate(60, end='…')|replace(' …', '…', 1) }}{{ ", ".join(pkg.arch) }}
29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /aurbs/remotedb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import pyalpm 5 | from pycman.config import PacmanConfig 6 | 7 | class RemoteDB(object): 8 | def __init__(self, root='/'): 9 | self.root = root 10 | pc = PacmanConfig(conf=self._rp('/etc/pacman.conf')) 11 | # Add root path prefix, as alpm seems to expect absolute paths 12 | for option in ['RootDir', 'DBPath', 'GPGDir', 'LogFile']: 13 | pc.options[option] = self._rp(pc.options[option]) 14 | self.handle = pc.initialize_alpm() 15 | 16 | def _rp(self, path): 17 | """ 18 | Add the root prefix to a path 19 | """ 20 | return os.path.join(self.root, path.lstrip('/')) 21 | 22 | def get_pkg(self, pkgname): 23 | """ 24 | Get a pkg, which provides pkgname 25 | """ 26 | dbs = self.handle.get_syncdbs() 27 | for db in dbs: 28 | pkg = pyalpm.find_satisfier(db.pkgcache, pkgname) 29 | if pkg is not None: 30 | return pkg 31 | -------------------------------------------------------------------------------- /aurbs/static.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | 5 | cachedir = '/var/cache/pacman/pkg/' 6 | srcpkgdir = '/var/cache/aurbs/srcpkgs/' 7 | pkg_db_dir = '/var/lib/aurbs/pkg_db' 8 | 9 | def chroot(arch): 10 | return os.path.join('/var/lib/aurbs/chroot', arch) 11 | 12 | def chroot_root(arch): 13 | return os.path.join(chroot(arch), 'root') 14 | 15 | def chroot_build(arch): 16 | return os.path.join(chroot(arch), 'build') 17 | 18 | def build_dir(arch): 19 | return os.path.join('/var/cache/aurbs/build', arch) 20 | 21 | def ccache_dir(arch): 22 | return os.path.join('/var/cache/aurbs/ccache', arch) 23 | 24 | def repodir(arch): 25 | return os.path.join('/var/lib/aurbs/aurstaging', arch) 26 | 27 | def repodir_public(arch): 28 | return 
# Base directory for per-architecture AurBS state.
_STATE_BASE = '/var/lib/aurbs'

def build_db_dir(arch):
	"""Per-arch directory holding the build database."""
	return os.path.join(_STATE_BASE, 'build_db', arch)

def fails_dir(arch):
	"""Per-arch directory recording failed builds."""
	return os.path.join(_STATE_BASE, 'fails', arch)

def blocks_dir(arch):
	"""Per-arch directory recording blocked builds."""
	return os.path.join(_STATE_BASE, 'blocks', arch)
- modify `/etc/aurbs.yml` to define your pkgs and archs
class WebServer(object):
	"""
	Minimal static file server: serves ./<subdir> on localhost:<port>
	from a background daemon thread.

	w = WebServer('aurstaging', 8000)

	import time
	time.sleep(10)

	w.stop()
	"""
	def __init__(self, subdir, port):
		# subdir: directory (relative to the current working directory)
		#         whose contents are served
		# port: TCP port to bind on 127.0.0.1
		self.subdir = subdir
		self.port = port

		class Server(socketserver.TCPServer):
			allow_reuse_address = True
			def handle_error(self, request, client_address):
				# Swallow per-request errors instead of letting TCPServer
				# print a traceback for every broken client connection.
				# NOTE(review): cas/type_/value_/traceback_ are computed but
				# unused since the log call below is commented out.
				cas = '(%s:%i)' % client_address
				type_, value_, traceback_ = sys.exc_info()
				#log.debug('Webserver error handling request from: %s: %s' % (client_address, value_.__repr__()))

		class Handler(http.server.SimpleHTTPRequestHandler):
			def translate_path(self, path):
				# Map a request path to a filesystem path below <cwd>/<subdir>,
				# skipping '..', '.' and drive components — same approach as the
				# stdlib implementation, but rooted in subdir instead of cwd.
				path = path.split('?',1)[0]
				path = path.split('#',1)[0]
				path = posixpath.normpath(urllib.parse.unquote(path))
				words = path.split('/')
				words = [w for w in words if w]
				path = os.path.join(os.getcwd(), subdir)
				for word in words:
					drive, word = os.path.splitdrive(word)
					head, word = os.path.split(word)
					if word in (os.curdir, os.pardir): continue
					path = os.path.join(path, word)
				return path
			def log_message(self, format, *args):
				# route per-request logging into the aurbs logger
				log.debug("web-%i: %s - - %s" % (port, self.client_address[0], format%args))

		self.httpd = Server(('127.0.0.1', self.port), Handler)
		# serve_forever runs in a daemon thread so it cannot block shutdown
		self.t = threading.Thread(target=self.httpd.serve_forever)
		self.t.daemon = True
		self.t.start()
		log.debug("Start serving '%s' at port %i" % (self.subdir, self.port))

	def stop(self):
		"""Shut down the server and wait for the serving thread to exit."""
		log.debug("Stop serving '%s' at port %i" % (self.subdir, self.port))
		self.httpd.shutdown()
		self.httpd.server_close()
		self.t.join()
# Package file suffixes produced by makepkg; .zst is the pacman >= 5.2
# default (PKGEXT), .xz the older one — accept both.
_PKG_SUFFIXES = ('.pkg.tar.xz', '.pkg.tar.zst')

def find_pkg_files(pkgname=None, directory=None):
	"""
	List the package files in *directory*.

	pkgname: None -> all packages; a string -> only files whose name part
	matches exactly; a non-string container -> files whose name part is a
	member (splitpkg: list of subpkg names).
	Returns [] when no directory is given. Filenames that do not follow
	the name-ver-rel-arch scheme are skipped instead of crashing.
	"""
	if not directory:
		return []
	respkgs = []
	for item in os.listdir(directory):
		# require the '.pkg.tar.*' separator so e.g. 'foopkg.tar.xz'
		# is not mistaken for a package file
		if not item.endswith(_PKG_SUFFIXES):
			continue
		try:
			ipkgname, ipkgver, ipkgrel, iarch = item.rsplit("-", 3)
		except ValueError:
			# fewer than three dashes: not a well-formed package filename
			continue
		if pkgname is None or ipkgname == pkgname:
			respkgs.append(item)
		elif not isinstance(pkgname, str) and ipkgname in pkgname:
			# splitpkg: pkgname is list of subpkgs
			respkgs.append(item)
	return respkgs
def look(pkgname, deepth=0):
	"""
	Rank *pkgname* (and, recursively, every package depending on it) by
	its maximum distance from the dependency root, recorded in `groups`.
	"""
	if pkgname in groups and groups[pkgname] >= deepth:
		# Already reached via an equal-or-deeper path: our dependents were
		# then ranked at least as deep as they would be now, so stop here
		# (avoids redundant exponential re-walks of dense graphs).
		return
	groups[pkgname] = deepth
	newdeepth = deepth + 1
	# G.predecessors() replaces predecessors_iter(), which was removed in
	# NetworkX 2.0; on 1.x it returns a list, which iterates the same way.
	for depending_pkg in G.predecessors(pkgname):
		look(depending_pkg, newdeepth)
ypos[group] = y + 1 79 | 80 | pos["aurbs-root"] = (0, ypos[1] / 2) 81 | 82 | #pos=nx.spring_layout(G, iterations=10) 83 | #nx.draw(G,pos) 84 | #H=G.to_directed() 85 | #pos=nx.graphviz_layout(G,prog='twopi',args='') 86 | #plt.figure(figsize=(8,8)) 87 | 88 | 89 | plt.figure(figsize=(20,40)) 90 | nx.draw(G, pos, node_size=300, node_color='b')#, height=(ypos[0] * 300) ) 91 | plt.savefig("/tmp/dep.svg") # save as png 92 | #plt.show() 93 | -------------------------------------------------------------------------------- /aurbs/ui/static/js/ansispan.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2011 by Maciej Małecki 3 | * 4 | * Permission is hereby granted, free of charge, to any person obtaining a copy 5 | * of this software and associated documentation files (the "Software"), to deal 6 | * in the Software without restriction, including without limitation the rights 7 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | * copies of the Software, and to permit persons to whom the Software is 9 | * furnished to do so, subject to the following conditions: 10 | * 11 | * The above copyright notice and this permission notice shall be included in 12 | * all copies or substantial portions of the Software. 13 | * 14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 19 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 20 | * THE SOFTWARE. 
21 | * 22 | * https://github.com/mmalecki/ansispan/ 23 | * 24 | */ 25 | 26 | 27 | /* 28 | * see http://en.wikipedia.org/wiki/ANSI_escape_code#Colors 29 | * 30 | * Sequence start: »\033[« 31 | * Sequence end: »m« 32 | * Sequence content: SGR parameters, seperated with »;« 33 | */ 34 | 35 | var ansispan = function (str) { 36 | str = str.replace(/\r\n/mg, "\n"); 37 | str = str.replace(/\033\(B/mg, ""); 38 | while( !(str.indexOf("\r") === -1) ) { 39 | str = str.replace(/^.*\r([^\r]*)$/mg, "$1") 40 | } 41 | 42 | str = str.replace(/\033\[([^m]*)m/g, function(sequence) { 43 | var sgr_params = RegExp.$1.split(';'); 44 | var style = []; 45 | if (sgr_params.length == 0 || sgr_params.indexOf('0') > -1) { 46 | style.push( "sgr0" ); 47 | } 48 | else { 49 | sgr_params.forEach(function(sgr) { 50 | style.push( 'sgr' + sgr ); 51 | }); 52 | } 53 | return ''; 54 | }); 55 | 56 | return str; 57 | }; 58 | 59 | 60 | if (typeof module !== 'undefined' && module.exports) { 61 | module.exports = ansispan; 62 | } 63 | 64 | -------------------------------------------------------------------------------- /aurbs/ui/static/style.css: -------------------------------------------------------------------------------- 1 | td ul { 2 | list-style: none outside none; 3 | margin: 0; 4 | padding: 0; 5 | } 6 | 7 | .resultlabel { 8 | width: 90%; 9 | } 10 | 11 | a.resultlabel:not([href]) { 12 | cursor: default !important; 13 | } 14 | 15 | .loading { 16 | background-image: 
url(data:image/gif;base64,R0lGODlhEAALAPQAAN7e3oiIiNHR0c3NzdbW1omJiYiIiJeXl7Ozs6enp8bGxpKSkqCgoLa2tqmpqcjIyJSUlIiIiKKiotXV1dDQ0Nra2pqamtLS0tnZ2cXFxb29vcvLy9jY2AAAAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh/hpDcmVhdGVkIHdpdGggYWpheGxvYWQuaW5mbwAh+QQJCwAAACwAAAAAEAALAAAFLSAgjmRpnqSgCuLKAq5AEIM4zDVw03ve27ifDgfkEYe04kDIDC5zrtYKRa2WQgAh+QQJCwAAACwAAAAAEAALAAAFJGBhGAVgnqhpHIeRvsDawqns0qeN5+y967tYLyicBYE7EYkYAgAh+QQJCwAAACwAAAAAEAALAAAFNiAgjothLOOIJAkiGgxjpGKiKMkbz7SN6zIawJcDwIK9W/HISxGBzdHTuBNOmcJVCyoUlk7CEAAh+QQJCwAAACwAAAAAEAALAAAFNSAgjqQIRRFUAo3jNGIkSdHqPI8Tz3V55zuaDacDyIQ+YrBH+hWPzJFzOQQaeavWi7oqnVIhACH5BAkLAAAALAAAAAAQAAsAAAUyICCOZGme1rJY5kRRk7hI0mJSVUXJtF3iOl7tltsBZsNfUegjAY3I5sgFY55KqdX1GgIAIfkECQsAAAAsAAAAABAACwAABTcgII5kaZ4kcV2EqLJipmnZhWGXaOOitm2aXQ4g7P2Ct2ER4AMul00kj5g0Al8tADY2y6C+4FIIACH5BAkLAAAALAAAAAAQAAsAAAUvICCOZGme5ERRk6iy7qpyHCVStA3gNa/7txxwlwv2isSacYUc+l4tADQGQ1mvpBAAIfkECQsAAAAsAAAAABAACwAABS8gII5kaZ7kRFGTqLLuqnIcJVK0DeA1r/u3HHCXC/aKxJpxhRz6Xi0ANAZDWa+kEAA7AAAAAAAAAAAA); 17 | background-repeat: no-repeat; 18 | background-position: center calc(100% - 0.6em); 19 | padding-bottom: 18px; 20 | } 21 | 22 | .breadcrumb li:nth-last-child(1) a { 23 | color: #999999 !important; 24 | cursor: text !important; 25 | text-decoration: none !important; 26 | } 27 | 28 | /* overwrite label[href] stuff */ 29 | .resultlabel.label-success[href] { 30 | background-color: #468847; 31 | } 32 | .resultlabel.label-important[href] { 33 | background-color: #B94A48; 34 | } 35 | .resultlabel.label-info[href] { 36 | background-color: #3A87AD; 37 | } 38 | 39 | 40 | /* ANSI colors - see http://en.wikipedia.org/wiki/ANSI_escape_code#Colors */ 41 | .sgr0, .sgr { 42 | color: #333333; 43 | font-weight: normal; 44 | font-style: normal; 45 | } 46 | .sgr1 { font-weight: bold; } 47 | .sgr3 { font-style: italic; } 48 | 49 | .sgr21 { font-weight: normal; } 50 | .sgr23 { font-style: normal; } 51 | 52 | /* foreground colors */ 53 | .sgr30 { color: #333333; } 54 | .sgr31 { color: #bd362f; } 55 | .sgr32 { 
/* ANSI SGR foreground colors 33–39
   (see http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) */
.sgr33 { color: #f89406; }
.sgr34 { color: #0044cc; }
.sgr35 { color: purple; }  /* was "#purple" — not a valid hex color */
.sgr36 { color: #2f96b4; }
.sgr37 { color: white; }   /* was "#white" — not a valid hex color */
.sgr39 { color: #333333; } /* 39 = default foreground */
def call(*popenargs, timeout=None, interrupt=None, int_active_child=False, **kwargs):
	"""
	Run a subprocess, mirroring its output, and return its wait status.

	same args as subprocess.Popen
	timeout: passed to the child wait() calls
	interrupt: signal to send on exception
	int_active_child: run the child under a pty and deliver Ctrl-C through
	it, for children that create their own process group
	"""
	if int_active_child:
		# this is needed, if the subprocess (or its childs)
		# spawn a new pgroup (eg. systemd-nspawn)
		pid, master = pty.fork()
		if pid == 0:
			# pty child: run the command and exit with its return code
			p = subprocess.Popen(*popenargs, **kwargs)
			os._exit(p.wait(timeout=timeout))
		try:
			# forward the pty output to our stdout from a daemon thread
			output = PTYOutputProxy(master)
			output_thread = threading.Thread(target=output.pass_through)
			output_thread.daemon = True
			output_thread.start()
			# NOTE(review): this is the raw 16-bit waitpid() status, not the
			# plain exit code that subprocess' wait() returns in the else
			# branch (exit code 1 shows up as 256 here) — confirm callers
			# (e.g. ccall) expect that.
			return os.waitpid(pid, 0)[1]
		except:
			# bare except on purpose: also catches KeyboardInterrupt
			if interrupt == signal.SIGINT:
				log.debug("Sending Ctrl-C via stdin to subprocess")
				os.write(master, b"\x03")
			# reap the child; it may already be gone
			try:
				os.waitpid(pid, 0)
			except Exception:
				pass
			raise
	else:
		# os.setsid puts the child in its own process group, so killpg
		# below can signal it and all of its children at once
		with subprocess.Popen(*popenargs, stdin=subprocess.PIPE, preexec_fn=os.setsid, **kwargs) as p:
			try:
				return p.wait(timeout=timeout)
			except:
				# bare except on purpose: also catches KeyboardInterrupt
				if interrupt:
					log.debug("Sending signal to subprocess pgroup")
					os.killpg(p.pid, interrupt)
				else:
					log.warning("KeyboardInterrupt detected! - exiting subprocess...")
				p.wait(timeout=timeout)
				raise
def convert_data(r):
	"""Map a raw AUR RPC result dict onto the field names used by AurBS."""
	# RPC field name -> AurBS field name
	field_map = {
		'Maintainer': 'maintainer',
		'Description': 'description',
		'License': 'license',
		'ID': 'id',
		'Version': 'version',
		'Name': 'name',
		'PackageBase': 'pkgbase',
		'NumVotes': 'votes',
	}
	data = {ours: r[theirs] for theirs, ours in field_map.items()}
	# URLPath is relative; prepend the AUR host to get the snapshot URL
	data['srcpkg'] = 'https://aur.archlinux.org' + r['URLPath']
	return data

Failed packages

12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | {% for pkg in pkgs_failed %} 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | {% endfor %} 29 |
NameTimeVersionDescriptionArch
{{ pkg.name }}{{ pkg.date|datetimeformat }}{{ pkg.version }}{{ pkg.description|truncate(60, end='…')|replace(' …', '…', 1) }}{{ pkg.arch }}
30 | 31 |

Packages not found in AUR

32 | 33 | 34 | 35 | 36 | 37 | 38 | {% for pkg in pkgs_notinaur %} 39 | 40 | 41 | 42 | 43 | 44 | {% endfor %} 45 |
NameTimeArch
{{ pkg.name }}{{ pkg.date|datetimeformat }}{{ pkg.arch }}
46 | 47 |

Blocked packages (missing dependencies)

48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | {% for pkg in pkgs_depmiss %} 56 | 57 | 58 | 59 | 60 | 61 | 62 | {% endfor %} 63 |
NameTimeMissing dependenciesArch
{{ pkg.name }}{{ pkg.date|datetimeformat }}{{ ", ".join(pkg.depends) }}{{ pkg.arch }}
64 | 65 |

Blocked packages (blocked dependencies)

66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | {% for pkg in pkgs_blocked %} 74 | 75 | 76 | 77 | 78 | 79 | 80 | {% endfor %} 81 |
NameTimeBlocked dependenciesArch
{{ pkg.name }}{{ pkg.date|datetimeformat }}{{ ", ".join(pkg.depends) }}{{ pkg.arch }}
82 | {% endblock %} 83 | -------------------------------------------------------------------------------- /aurbs/aur.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import simplejson as json 5 | import urllib.request, urllib.parse, urllib.error 6 | import urllib.request, urllib.error, urllib.parse 7 | from optparse import OptionParser 8 | 9 | from aurbs.static import * 10 | 11 | target_url = "http://aur.archlinux.org/rpc.php" 12 | 13 | class AppURLopener(urllib.request.FancyURLopener): 14 | version = 'AurBS/1.0 python' 15 | 16 | urllib.request._urlopener = AppURLopener() 17 | 18 | def search(args): 19 | params = urllib.parse.urlencode({'type':'search', 'arg':args}) 20 | response = urllib.request.urlopen("%s?%s" % (target_url,params)).read() 21 | print_results(json.loads(response)) 22 | 23 | def info(args): 24 | params = urllib.parse.urlencode({'type':'info', 'arg':args}) 25 | response = urllib.request.urlopen("%s?%s" % (target_url,params)).read() 26 | print_results(json.loads(response)) 27 | 28 | def convert_data(r): 29 | return { 30 | "maintainer": r['Maintainer'], 31 | "description": r['Description'], 32 | "license": r['License'], 33 | "id": r['ID'], 34 | "version": r['Version'], 35 | "name": r['Name'], 36 | "pkgbase": r['PackageBase'], 37 | "srcpkg": 'https://aur.archlinux.org' + r['URLPath'], 38 | "votes": r['NumVotes'], 39 | } 40 | 41 | def get_pkg(pkgname, failcount=0): 42 | params = urllib.parse.urlencode({'type':'info', 'arg':pkgname}) 43 | response = urllib.request.urlopen("%s?%s" % (target_url,params)).read() 44 | result = json.loads(response) 45 | if not result['resultcount'] == 1 and result['type'] == 'info': 46 | #if failcount > 1: 47 | raise Exception("Invalid AUR API result for '%s'" % pkgname) 48 | #else: 49 | # return get(pkgname, failcount+1) 50 | return convert_data(result['results']) 51 | 52 | def sync(pkgname): 53 | a = get_pkg(pkgname) 54 | u = 
urllib.request.urlopen(a['srcpkg']) 55 | f = open(os.path.join(srcpkgdir, '%s.tar.gz' % pkgname), 'wb') 56 | f.write(u.read()) 57 | f.close() 58 | 59 | def print_results(data): 60 | if data['type'] == 'error': 61 | print('Error: %s' % data['results']) 62 | return 63 | if not isinstance(data['results'], list): 64 | data['results'] = [data['results'],] 65 | print('Packages:') 66 | for pkg in data['results']: 67 | for name in pkg: 68 | print(' %s: %s' % (name, pkg[name])) 69 | print('') 70 | 71 | def main(): 72 | usage = "usage: %prog [options] arg" 73 | parser = OptionParser(usage=usage) 74 | parser.set_defaults(search_mode=0) 75 | parser.add_option("-s", 76 | "--search", 77 | action="store_const", 78 | const=0, 79 | dest="search_mode", 80 | help="Operate in search mode") 81 | parser.add_option("-i", 82 | "--info", 83 | action="store_const", 84 | const=1, 85 | dest="search_mode", 86 | help="Operate in detail mode") 87 | 88 | (options, args) = parser.parse_args() 89 | if len(args) < 1: 90 | parser.error("Incorrect number of arguments") 91 | if options.search_mode == 1: 92 | info(args[0]) 93 | else: 94 | search(args[0]) 95 | 96 | if __name__ == "__main__": 97 | main() 98 | 99 | -------------------------------------------------------------------------------- /aurbs/ui/templates/package_log.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}{{ pkg.name }} :: log{% endblock %} 4 | 5 | {% block path %} 6 | {{ super () }} 7 | {{ bclink('package_list', 'Packages') }} 8 | {{ bclink('package_view', pkg.name, args={'pkgname': pkg.name}) }} 9 | {{ bclink('package_log', 'Log', args={'pkgname': pkg.name, 'build_arch': build_arch}) }} 10 | {% endblock %} 11 | 12 | {% block javascript %} 13 | {{ super() }} 14 | 15 | 98 | 99 | {% endblock %} 100 | 101 | {% block container %} 102 |
103 |
104 |
105 |
106 |

{{ pkg.name }} {{ pkg.version if pkg.version != 'n/a' }} :: {{ build_arch }}

107 |
108 |
109 |
110 | 111 |

112 | 				
113 |
114 |
115 |
116 | {% endblock %} 117 | -------------------------------------------------------------------------------- /templates/pacman.conf.in: -------------------------------------------------------------------------------- 1 | # 2 | # /etc/pacman.conf 3 | # 4 | # See the pacman.conf(5) manpage for option and repository directives 5 | 6 | # 7 | # GENERAL OPTIONS 8 | # 9 | [options] 10 | # The following paths are commented out with their default values listed. 11 | # If you wish to use different paths, uncomment and update the paths. 12 | #RootDir = / 13 | #DBPath = /var/lib/pacman/ 14 | CacheDir = /var/cache/pacman/pkg/ 15 | #LogFile = /var/log/pacman.log 16 | #GPGDir = /etc/pacman.d/gnupg/ 17 | HoldPkg = pacman glibc 18 | #XferCommand = /usr/bin/curl -C - -f %u > %o 19 | #XferCommand = /usr/bin/wget --passive-ftp -c -O %o %u 20 | #CleanMethod = KeepInstalled 21 | #UseDelta = 0.7 22 | Architecture = %ARCH% 23 | 24 | # Pacman won't upgrade packages listed in IgnorePkg and members of IgnoreGroup 25 | #IgnorePkg = 26 | #IgnoreGroup = 27 | 28 | #NoUpgrade = 29 | #NoExtract = 30 | 31 | # Misc options 32 | #UseSyslog 33 | #Color 34 | #TotalDownload 35 | CheckSpace 36 | #VerbosePkgLists 37 | 38 | # By default, pacman accepts packages signed by keys that its local keyring 39 | # trusts (see pacman-key and its man page), as well as unsigned packages. 40 | SigLevel = Required DatabaseOptional 41 | LocalFileSigLevel = Optional 42 | #RemoteFileSigLevel = Required 43 | 44 | # NOTE: You must run `pacman-key --init` before first using pacman; the local 45 | # keyring can then be populated with the keys of all official Arch Linux 46 | # packagers with `pacman-key --populate archlinux`. 
47 | 48 | # 49 | # REPOSITORIES 50 | # - can be defined here or included from another file 51 | # - pacman will search repositories in the order defined here 52 | # - local/custom mirrors can be added here or in separate files 53 | # - repositories listed first will take precedence when packages 54 | # have identical names, regardless of version number 55 | # - URLs will have $repo replaced by the name of the current repo 56 | # - URLs will have $arch replaced by the name of the architecture 57 | # 58 | # Repository entries are of the format: 59 | # [repo-name] 60 | # Server = ServerName 61 | # Include = IncludePath 62 | # 63 | # The header [repo-name] is crucial - it must be present and 64 | # uncommented to enable the repo. 65 | # 66 | 67 | # The testing repositories are disabled by default. To enable, uncomment the 68 | # repo name header and Include lines. You can add preferred servers immediately 69 | # after the header, and they will be used before the default mirrors. 70 | 71 | #[testing] 72 | #Include = /etc/pacman.d/mirrorlist 73 | 74 | [core] 75 | Include = /etc/pacman.d/mirrorlist 76 | 77 | [extra] 78 | Include = /etc/pacman.d/mirrorlist 79 | 80 | #[community-testing] 81 | #Include = /etc/pacman.d/mirrorlist 82 | 83 | [community] 84 | Include = /etc/pacman.d/mirrorlist 85 | 86 | # If you want to run 32 bit applications on your x86_64 system, 87 | # enable the multilib repositories as required here. 88 | 89 | #[multilib-testing] 90 | #Include = /etc/pacman.d/mirrorlist 91 | 92 | #[multilib] 93 | #Include = /etc/pacman.d/mirrorlist 94 | 95 | # An example of a custom package repository. See the pacman manpage for 96 | # tips on creating your own repositories. 
97 | #[custom] 98 | #SigLevel = Optional TrustAll 99 | #Server = file:///home/custompkgs 100 | 101 | [aurstaging] 102 | SigLevel = Never 103 | Server = http://127.0.0.1:8024/$arch 104 | -------------------------------------------------------------------------------- /aurbs/ui/templates/status.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Status{% endblock %} 4 | 5 | {% block path %} 6 | {{ super () }} 7 | {{ bclink('status', 'Status') }} 8 | {% endblock %} 9 | 10 | {% block container %} 11 | 61 |

Rebuilding x86_64

62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 | 74 | 75 | 76 | 77 | 78 | 82 | 83 |
Scheduled (0)
79 |
    80 |
81 |
84 |
85 |
86 | 87 | 88 | 89 | 90 | 91 | 93 | 94 |
Building
92 |
95 |
96 |
97 | 98 | 99 | 100 | 101 | 102 | 106 | 107 |
Done (0)
103 |
    104 |
105 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 | {% endblock %} 121 | -------------------------------------------------------------------------------- /aurbs/ui/templates/package_view.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}{{ pkg.name }}{% endblock %} 4 | 5 | {% block path %} 6 | {{ super () }} 7 | {{ bclink('package_list', 'Packages') }} 8 | {{ bclink('package_view', pkg.name, args={'pkgname': pkg.name}) }} 9 | {% endblock %} 10 | 11 | {% block container %} 12 |
13 |
14 |
15 |
16 |

{{ pkg.name }} {{ pkg.version if pkg.version != 'n/a' }}

17 |
18 |
19 |
20 |
21 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 43 | 44 | 45 | 46 | 57 | 58 | 59 | 60 | 67 | 68 | 69 | 70 | 77 | 78 | 79 | 80 | 89 | 90 |
Description:{{ pkg.description }} 25 |
License:{{ pkg.license }}
Votes:{{ pkg.votes }}
Maintainer: 37 | {%- if pkg.maintainer -%} 38 | {{ pkg.maintainer }} 39 | {%- else -%} 40 | None 41 | {%- endif -%} 42 |
Local Dependencies:  47 |
    48 | {% for dep in local_depends %} 49 | {%- if dep.provider -%} 50 |
  • {{ dep.name }} ({{ dep.provider }})
  • 51 | {%- else -%} 52 |
  • {{ dep.name }}
  • 53 | {%- endif -%} 54 | {% endfor %} 55 |
56 |
Required by: 61 |
    62 | {% for dep in required_by %} 63 |
  • {{ dep }}
  • 64 | {% endfor %} 65 |
66 |
Provides: 71 |
    72 | {% for provide in pkg.provides %} 73 |
  • {{ provide }}
  • 74 | {% endfor %} 75 |
76 |
Builds: 81 |
    82 | {% for arch, buildlist in builds.items() %} 83 | {% for build in buildlist %} 84 |
  • {{ build }}
  • 85 | {% endfor %} 86 | {% endfor %} 87 |
88 |
91 |
92 |
93 | 94 | 95 | 96 | 97 | {% for arch, result in results|dictsort %} 98 | 99 | 100 | 101 | 102 | {% endfor %} 103 |
ArchResult
{{ arch }}{{ result_name(result) }}
104 | 105 | 106 | 107 | 108 | 109 | 117 | 118 |
Links
110 | 116 |
119 |
120 |
121 |
122 |
123 | {% endblock %} 124 | -------------------------------------------------------------------------------- /aurbs/ui/application.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import sys 5 | from flask import Flask, render_template, request, redirect, url_for, jsonify, Response, send_file, send_from_directory 6 | 7 | sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '../..')) 8 | from aurbs import dummy 9 | from aurbs.db import Database 10 | from aurbs.config import AurBSConfig 11 | from aurbs.static import * 12 | from aurbs.helper import * 13 | 14 | app = Flask(__name__) 15 | AurBSConfig("/etc/aurbs.yml") 16 | db = Database() 17 | 18 | @app.template_filter('datetimeformat') 19 | def datetimeformat(value, format='%Y-%m-%d %H:%M:%S'): 20 | return value.strftime(format) 21 | 22 | @app.route('/') 23 | def index(): 24 | repo_name = AurBSConfig().public_repo['name'] 25 | try: 26 | repo_url = AurBSConfig().public_repo['http_url'] 27 | except KeyError: 28 | repo_url = url_for('public_repo', _external=True) 29 | return render_template("base.html", repo_name=repo_name, repo_url=repo_url) 30 | 31 | @app.route('/status') 32 | def status(): 33 | return render_template("status.html", status=db.get_status()) 34 | 35 | @app.route('/status.json') 36 | def status_json(): 37 | return jsonify(db.get_status()) 38 | 39 | @app.route("/problems") 40 | def problems(): 41 | pkgs_failed = db.get_results(rtype='problem', type='fail') 42 | pkgs_notinaur = db.get_results(rtype='problem', type='not_in_aur') 43 | pkgs_depmiss = db.get_results(rtype='problem', type='missing_depends') 44 | pkgs_blocked = db.get_results(rtype='problem', type='blocked_depends') 45 | return render_template("problems.html", pkgs_failed=pkgs_failed, pkgs_notinaur=pkgs_notinaur, pkgs_depmiss=pkgs_depmiss, pkgs_blocked=pkgs_blocked) 46 | 47 | @app.route("/packages") 48 | @app.route("/packages/") 49 | def package_list(): 50 
| pkgs = [] 51 | query = request.args.get('query', None) 52 | for pkgname in AurBSConfig().aurpkgs: 53 | try: 54 | pkg = db.get_pkg(pkgname) 55 | except KeyError: 56 | pkg = dummy.aurpkg(pkgname) 57 | if query: 58 | if query not in pkg['name']: 59 | continue 60 | pkgs.append(pkg) 61 | if query and len(pkgs) == 1: 62 | return redirect(url_for('package_view', pkgname=pkgs[0]['name'])) 63 | pkgs = sorted(pkgs, key=lambda i: i['name']) 64 | return render_template("package_list.html", pkgs=pkgs) 65 | 66 | @app.route("/packages/") 67 | def package_view(pkgname): 68 | try: 69 | pkg = db.get_pkg(pkgname) 70 | except KeyError: 71 | pkg = dummy.aurpkg(pkgname) 72 | building_arch = None 73 | try: 74 | status = db.get_status() 75 | if status['building'] == pkgname: 76 | building_arch = status['arch'] 77 | except KeyError: 78 | pass 79 | results = {} 80 | builds = {} 81 | for arch in pkg['arch']: 82 | if building_arch == arch or building_arch is not None and arch == 'any': 83 | results[arch] = {'rtype': 'building', 'rvalue': {'name': pkgname, 'build_arch': building_arch}} 84 | else: 85 | results[arch] = db.get_result(pkgname, build_arch=arch) 86 | if not results[arch]: 87 | if arch in AurBSConfig().architectures or arch == 'any': 88 | results[arch] = {'rtype': 'scheduled'} 89 | else: 90 | results[arch] = {'rtype': 'disabled'} 91 | try: 92 | builds[arch] = find_pkg_files(pkg.get('splitpkgs', pkgname), directory=repodir(arch)) 93 | except (IndexError, FileNotFoundError): 94 | pass 95 | 96 | local_depends = [] 97 | for dependency in db.filter_dependencies([pkg['depends']], local=True): 98 | try: 99 | db.get_pkg(dependency) 100 | local_depends.append({'name': dependency}) 101 | except KeyError: 102 | # pkg provided by provider 103 | provider = db.get_provider(dependency)['name'] 104 | local_depends.append({'name': dependency, 'provider': provider}) 105 | local_depends.append(dependency) 106 | 107 | required_by = db.get_pkg_required_by(pkgname) if not pkg.get('dummy') else [] 108 | 
109 | return render_template("package_view.html", pkg=pkg, results=results, local_depends=local_depends, required_by=required_by, builds=builds) 110 | 111 | @app.route("/packages///log") 112 | def package_log(pkgname, build_arch): 113 | try: 114 | pkg = db.get_pkg(pkgname) 115 | except KeyError: 116 | return "pkg not found" 117 | try: 118 | status = db.get_status() 119 | if status['building'] == pkgname and status['arch'] == build_arch: 120 | building = True 121 | else: 122 | building = False 123 | except: 124 | building = False 125 | return render_template("package_log.html", pkg=pkg, build_arch=build_arch, building=building) 126 | 127 | @app.route("/packages///log.txt") 128 | def package_log_txt(pkgname, build_arch): 129 | try: 130 | pkg = db.get_pkg(pkgname) 131 | except KeyError: 132 | return "pkg not found" 133 | logfile = os.path.join(build_dir(build_arch), pkgname, "makepkg.log") 134 | if os.path.exists(logfile): 135 | seek = int(request.args.get('seek', -1)) 136 | f = open(logfile, 'rb') 137 | if seek != -1: 138 | f.seek(seek) 139 | logstr = f.read() 140 | # prevent x-sendfile as it doesn't support seeking 141 | return Response(logstr, content_type="text/plain", direct_passthrough=True) 142 | else: 143 | return send_file(f, mimetype="text/plain", add_etags=True, conditional=True) 144 | else: 145 | return "log not found" 146 | 147 | @app.route("/aurstaging//") 148 | def aurstaging_get(arch, pkg): 149 | if arch not in AurBSConfig().architectures and arch != 'any': 150 | return "404" 151 | else: 152 | return send_from_directory(repodir(arch), pkg) 153 | 154 | @app.route("/%s//" % AurBSConfig().public_repo['name']) 155 | def public_repo_get(arch, pkg): 156 | if arch not in AurBSConfig().architectures and arch != 'any': 157 | return "invalid arch" 158 | else: 159 | return send_from_directory(repodir_public(arch), pkg) 160 | 161 | @app.route("/%s" % AurBSConfig().public_repo['name']) 162 | def public_repo(): 163 | return "no directory listing using internal 
webserver" 164 | 165 | try: 166 | app.secret_key = open("/tmp/aurbs.secret.key", 'b').read() 167 | except: 168 | app.secret_key = os.urandom(24) 169 | open("/tmp/aurbs.secret.key", 'wb').write(app.secret_key) 170 | os.chmod("/tmp/aurbs.secret.key", 0o600) 171 | 172 | if __name__ == '__main__': 173 | app.debug = True 174 | app.run(host="0.0.0.0", port=80) 175 | -------------------------------------------------------------------------------- /aurbs/ui/templates/base.html: -------------------------------------------------------------------------------- 1 | {%- macro bclink(endpoint,text,args={}) -%} 2 |
  • / {{ text }}
  • 3 | {%- endmacro -%} 4 | {%- macro navlink(endpoint,text,subendpoints=[]) -%} 5 | {%- if request.endpoint.endswith(endpoint) or request.endpoint in subendpoints %} 6 |
  • {{text}}
  • 7 | {% else %} 8 |
  • {{text}}
  • 9 | {%- endif %} 10 | {%- endmacro -%} 11 | {%- macro result_title(result) -%} 12 | {%- if result.rtype == 'problem' -%} 13 | {%- if result.rvalue.type == 'missing_depends' -%} 14 | Missing dependencies: {{ ', '.join(result.rvalue.depends) }} 15 | {%- elif result.rvalue.type == 'blocked_depends' -%} 16 | Missing dependencies: {{ ', '.join(result.rvalue.depends) }} 17 | {%- elif result.rvalue.type == 'not_in_aur' -%} 18 | PKG does not exist in AUR 19 | {%- endif -%} 20 | {%- elif result.rtype == 'build' -%} 21 | Release: {{ result.rvalue.release }} 22 | {%- endif -%} 23 | {%- endmacro -%} 24 | {%- macro result_name(result) -%} 25 | {%- if result.rtype == 'problem' -%} 26 | {%- if result.rvalue.type == 'fail' -%} 27 | Failed 28 | {%- elif result.rvalue.type == 'not_in_aur' -%} 29 | Not in AUR 30 | {%- elif result.rvalue.type == 'missing_depends' -%} 31 | Dependency missing 32 | {%- elif result.rvalue.type == 'blocked_depends' -%} 33 | Blocked 34 | {%- endif -%} 35 | {%- elif result.rtype == 'build' -%} 36 | Built 37 | {%- elif result.rtype == 'building' -%} 38 | Building 39 | {%- elif result.rtype == 'scheduled' -%} 40 | Scheduled 41 | {%- elif result.rtype == 'disabled' -%} 42 | Disabled 43 | {%- endif -%} 44 | {%- endmacro -%} 45 | {%- macro result_class(result) -%} 46 | {%- if result.rtype == 'problem' -%} 47 | {%- if result.rvalue.type == 'fail' -%} 48 | important 49 | {%- else -%} 50 | warning 51 | {%- endif -%} 52 | {%- elif result.rtype == 'build' -%} 53 | success 54 | {%- elif result.rtype == 'building' -%} 55 | info 56 | {%- elif result.rtype == 'scheduled' -%} 57 | inverse 58 | {%- elif result.rtype == 'disabled' -%} 59 | default 60 | {%- endif -%} 61 | {%- endmacro -%} 62 | {%- macro result_log_link(result) -%} 63 | {%- if result.rtype == 'problem' -%} 64 | {%- if result.rvalue.type == 'fail' -%} 65 | href="{{ url_for('package_log', pkgname=result.rvalue.name, build_arch=result.rvalue.build_arch) }}" 66 | {%- endif -%} 67 | {%- elif result.rtype in 
['build', 'building'] -%} 68 | href="{{ url_for('package_log', pkgname=result.rvalue.name, build_arch=result.rvalue.build_arch) }}" 69 | {%- endif -%} 70 | {%- endmacro -%} 71 | 72 | 73 | 74 | 75 | {% block basetitle %}AurBS :: {% endblock %}{% block title %}{% endblock %} 76 | 77 | 78 | 79 | 80 | 85 | 86 | 87 | 88 | 89 | 92 | 93 | 94 | {% block javascript %} 95 | 96 | 104 | {% endblock %} 105 | 106 | 126 | 127 |
    128 | 132 | {% block messages %} 133 | {% with messages = get_flashed_messages(with_categories=true) %} 134 | {% if messages %} 135 |
    136 | {% for category, message in messages %} 137 |
    {{ category.title() }}: {{ message|safe }}
    138 | {% endfor %} 139 |
    140 | 145 | {% endif %} 146 | {% endwith %} 147 | {% endblock %} 148 | {% block container %} 149 | 150 |
    151 |

    Aur Build System

    152 |

    Automatic AUR package building.

    153 |
    154 |
    155 | [{{ repo_name }}]
    156 | SigLevel = Never
    157 | Server = {{ repo_url }}/$arch
    158 | 
    159 | {% endblock %} 160 |
    161 | 162 | 163 | -------------------------------------------------------------------------------- /templates/makepkg.conf.in: -------------------------------------------------------------------------------- 1 | # 2 | # /etc/makepkg.conf 3 | # 4 | 5 | ######################################################################### 6 | # SOURCE ACQUISITION 7 | ######################################################################### 8 | # 9 | #-- The download utilities that makepkg should use to acquire sources 10 | # Format: 'protocol::agent' 11 | DLAGENTS=('ftp::/usr/bin/curl -fC - --ftp-pasv --retry 3 --retry-delay 3 -o %o %u' 12 | 'http::/usr/bin/curl -fLC - --retry 3 --retry-delay 3 -o %o %u' 13 | 'https::/usr/bin/curl -fLC - --retry 3 --retry-delay 3 -o %o %u' 14 | 'rsync::/usr/bin/rsync --no-motd -z %u %o' 15 | 'scp::/usr/bin/scp -C %u %o') 16 | 17 | # Other common tools: 18 | # /usr/bin/snarf 19 | # /usr/bin/lftpget -c 20 | # /usr/bin/wget 21 | 22 | #-- The the package required by makepkg to download VCS sources 23 | # Format: 'protocol::package' 24 | VCSCLIENTS=('bzr::bzr' 25 | 'git::git' 26 | 'hg::mercurial' 27 | 'svn::subversion') 28 | 29 | ######################################################################### 30 | # ARCHITECTURE, COMPILE FLAGS 31 | ######################################################################### 32 | # 33 | CARCH="%CARCH%" 34 | CHOST="%CHOST%" 35 | 36 | #-- Compiler and Linker Flags 37 | # -march (or -mcpu) builds exclusively for an architecture 38 | # -mtune optimizes for an architecture, but builds for whole processor family 39 | CPPFLAGS="-D_FORTIFY_SOURCE=2" 40 | CFLAGS="-march=%MARCH% -mtune=generic -O2 -pipe -fstack-protector-strong --param=ssp-buffer-size=4" 41 | CXXFLAGS="-march=%MARCH% -mtune=generic -O2 -pipe -fstack-protector-strong --param=ssp-buffer-size=4" 42 | LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro" 43 | #-- Make Flags: change this for DistCC/SMP systems 44 | MAKEFLAGS="-j1" 45 | #-- Debugging flags 
46 | DEBUG_CFLAGS="-g -fvar-tracking-assignments" 47 | DEBUG_CXXFLAGS="-g -fvar-tracking-assignments" 48 | 49 | ######################################################################### 50 | # BUILD ENVIRONMENT 51 | ######################################################################### 52 | # 53 | # Defaults: BUILDENV=(!distcc color !ccache check !sign) 54 | # A negated environment option will do the opposite of the comments below. 55 | # 56 | #-- distcc: Use the Distributed C/C++/ObjC compiler 57 | #-- color: Colorize output messages 58 | #-- ccache: Use ccache to cache compilation 59 | #-- check: Run the check() function if present in the PKGBUILD 60 | #-- sign: Generate PGP signature file 61 | # 62 | BUILDENV=(!distcc color ccache check !sign) 63 | # 64 | #-- If using DistCC, your MAKEFLAGS will also need modification. In addition, 65 | #-- specify a space-delimited list of hosts running in the DistCC cluster. 66 | #DISTCC_HOSTS="" 67 | # 68 | #-- Specify a directory for package building. 69 | #BUILDDIR=/tmp/makepkg 70 | 71 | ######################################################################### 72 | # GLOBAL PACKAGE OPTIONS 73 | # These are default values for the options=() settings 74 | ######################################################################### 75 | # 76 | # Default: OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !upx !debug) 77 | # A negated option will do the opposite of the comments below. 
78 | # 79 | #-- strip: Strip symbols from binaries/libraries 80 | #-- docs: Save doc directories specified by DOC_DIRS 81 | #-- libtool: Leave libtool (.la) files in packages 82 | #-- staticlibs: Leave static library (.a) files in packages 83 | #-- emptydirs: Leave empty directories in packages 84 | #-- zipman: Compress manual (man and info) pages in MAN_DIRS with gzip 85 | #-- purge: Remove files specified by PURGE_TARGETS 86 | #-- upx: Compress binary executable files using UPX 87 | #-- debug: Add debugging flags as specified in DEBUG_* variables 88 | # 89 | OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !upx !debug) 90 | 91 | #-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512 92 | INTEGRITY_CHECK=(md5) 93 | #-- Options to be used when stripping binaries. See `man strip' for details. 94 | STRIP_BINARIES="--strip-all" 95 | #-- Options to be used when stripping shared libraries. See `man strip' for details. 96 | STRIP_SHARED="--strip-unneeded" 97 | #-- Options to be used when stripping static libraries. See `man strip' for details. 
98 | STRIP_STATIC="--strip-debug" 99 | #-- Manual (man and info) directories to compress (if zipman is specified) 100 | MAN_DIRS=({usr{,/local}{,/share},opt/*}/{man,info}) 101 | #-- Doc directories to remove (if !docs is specified) 102 | DOC_DIRS=(usr/{,local/}{,share/}{doc,gtk-doc} opt/*/{doc,gtk-doc}) 103 | #-- Files to be removed from all packages (if purge is specified) 104 | PURGE_TARGETS=(usr/{,share}/info/dir .packlist *.pod) 105 | 106 | ######################################################################### 107 | # PACKAGE OUTPUT 108 | ######################################################################### 109 | # 110 | # Default: put built package and cached source in build directory 111 | # 112 | #-- Destination: specify a fixed directory where all packages will be placed 113 | #PKGDEST=/home/packages 114 | #-- Source cache: specify a fixed directory where source files will be cached 115 | #SRCDEST=/home/sources 116 | #-- Source packages: specify a fixed directory where all src packages will be placed 117 | #SRCPKGDEST=/home/srcpackages 118 | #-- Log files: specify a fixed directory where all log files will be placed 119 | #LOGDEST=/home/makepkglogs 120 | #-- Packager: name/email of the person or organization building packages 121 | #PACKAGER="AUR BS " 122 | #-- Specify a key to use for package signing 123 | #GPGKEY="" 124 | 125 | ######################################################################### 126 | # COMPRESSION DEFAULTS 127 | ######################################################################### 128 | # 129 | COMPRESSGZ=(gzip -c -f -n) 130 | COMPRESSBZ2=(bzip2 -c -f) 131 | COMPRESSXZ=(xz -c -z -) 132 | COMPRESSLRZ=(lrzip -q) 133 | COMPRESSLZO=(lzop -q) 134 | COMPRESSZ=(compress -c -f) 135 | 136 | ######################################################################### 137 | # EXTENSION DEFAULTS 138 | ######################################################################### 139 | # 140 | # WARNING: Do NOT modify these variables unless 
you know what you are 141 | # doing. 142 | # 143 | PKGEXT='.pkg.tar.xz' 144 | SRCEXT='.src.tar.gz' 145 | 146 | # vim: set ft=sh ts=2 sw=2 et: 147 | -------------------------------------------------------------------------------- /aurbs/aurinfo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | from copy import copy, deepcopy 4 | import pprint 5 | import sys 6 | 7 | class Attr(object): 8 | def __init__(self, name, is_multivalued=False, allow_arch_extensions=False): 9 | self.name = name 10 | self.is_multivalued = is_multivalued 11 | self.allow_arch_extensions = allow_arch_extensions 12 | 13 | PKGBUILD_ATTRIBUTES = { 14 | 'arch': Attr('arch', True), 15 | 'backup': Attr('backup', True), 16 | 'changelog': Attr('changelog', False), 17 | 'checkdepends': Attr('checkdepends', True), 18 | 'conflicts': Attr('conflicts', True, True), 19 | 'depends': Attr('depends', True, True), 20 | 'epoch': Attr('epoch', False), 21 | 'groups': Attr('groups', True), 22 | 'install': Attr('install', False), 23 | 'license': Attr('license', True), 24 | 'makedepends': Attr('makedepends', True, True), 25 | 'md5sums': Attr('md5sums', True, True), 26 | 'noextract': Attr('noextract', True), 27 | 'optdepends': Attr('optdepends', True, True), 28 | 'options': Attr('options', True), 29 | 'pkgname': Attr('pkgname', False), 30 | 'pkgrel': Attr('pkgrel', False), 31 | 'pkgver': Attr('pkgver', False), 32 | 'provides': Attr('provides', True, True), 33 | 'replaces': Attr('replaces', True, True), 34 | 'sha1sums': Attr('sha1sums', True, True), 35 | 'sha224sums': Attr('sha224sums', True, True), 36 | 'sha256sums': Attr('sha256sums', True, True), 37 | 'sha384sums': Attr('sha384sums', True, True), 38 | 'sha512sums': Attr('sha512sums', True, True), 39 | 'source': Attr('source', True, True), 40 | 'url': Attr('url', False), 41 | 'validpgpkeys': Attr('validpgpkeys', True), 42 | } 43 | 44 | def find_attr(attrname): 45 | # exact match 46 | attr = 
PKGBUILD_ATTRIBUTES.get(attrname, None) 47 | if attr: 48 | return attr 49 | 50 | # prefix match 51 | # XXX: this could break in the future if PKGBUILD(5) ever 52 | # introduces a key which is a subset of another. 53 | for k in PKGBUILD_ATTRIBUTES.keys(): 54 | if attrname.startswith(k + '_'): 55 | return PKGBUILD_ATTRIBUTES[k] 56 | 57 | def IsMultiValued(attrname): 58 | attr = find_attr(attrname) 59 | return attr and attr.is_multivalued 60 | 61 | class AurInfo(object): 62 | def __init__(self): 63 | self._pkgbase = {} 64 | self._packages = {} 65 | 66 | def GetPackageNames(self): 67 | return self._packages.keys() 68 | 69 | def GetMergedPackage(self, pkgname): 70 | package = deepcopy(self._pkgbase) 71 | package['pkgname'] = pkgname 72 | for k, v in self._packages.get(pkgname).items(): 73 | package[k] = deepcopy(v) 74 | return package 75 | 76 | def AddPackage(self, pkgname): 77 | self._packages[pkgname] = {} 78 | return self._packages[pkgname] 79 | 80 | def SetPkgbase(self, pkgbasename): 81 | self._pkgbase = {'pkgname' : pkgbasename} 82 | return self._pkgbase 83 | 84 | 85 | class StderrECatcher(object): 86 | def Catch(self, lineno, error): 87 | print('ERROR[%d]: %s' % (lineno, error), file=sys.stderr) 88 | 89 | 90 | class CollectionECatcher(object): 91 | def __init__(self): 92 | self._errors = [] 93 | 94 | def Catch(self, lineno, error): 95 | self._errors.append((lineno, error)) 96 | 97 | def HasErrors(self): 98 | return len(self._errors) > 0 99 | 100 | def Errors(self): 101 | return copy(self._errors) 102 | 103 | 104 | def ParseAurinfoFromIterable(iterable, ecatcher=None): 105 | aurinfo = AurInfo() 106 | 107 | if ecatcher is None: 108 | ecatcher = StderrECatcher() 109 | 110 | current_package = None 111 | lineno = 0 112 | 113 | for line in iterable: 114 | lineno += 1 115 | 116 | if not line.strip(): 117 | # end of package 118 | current_package = None 119 | continue 120 | 121 | if line.startswith('#'): 122 | # allow (and ignore) commented lines 123 | continue 124 | 125 | 
if not line.startswith('\t'): 126 | # start of new package 127 | try: 128 | key, value = map(str.strip, line.split('=', 1)) 129 | except ValueError: 130 | ecatcher.Catch(lineno, 'unexpected header format in section=%s' % 131 | current_package['pkgname']) 132 | continue 133 | 134 | if key == 'pkgbase': 135 | current_package = aurinfo.SetPkgbase(value) 136 | else: 137 | current_package = aurinfo.AddPackage(value) 138 | else: 139 | # package attribute 140 | if current_package is None: 141 | ecatcher.Catch(lineno, 'package attribute found outside of ' 142 | 'a package section') 143 | continue 144 | 145 | try: 146 | key, value = map(str.strip, line.split('=', 1)) 147 | except ValueError: 148 | ecatcher.Catch(lineno, 'unexpected attribute format in ' 149 | 'section=%s' % current_package['pkgname']) 150 | 151 | if IsMultiValued(key): 152 | if not current_package.get(key): 153 | current_package[key] = [] 154 | if value: 155 | current_package[key].append(value) 156 | else: 157 | if not current_package.get(key): 158 | current_package[key] = value 159 | else: 160 | ecatcher.Catch(lineno, 'overwriting attribute ' 161 | '%s: %s -> %s' % (key, current_package[key], 162 | value)) 163 | 164 | return aurinfo 165 | 166 | 167 | def ParseAurinfo(filename='.AURINFO', ecatcher=None): 168 | with open(filename) as f: 169 | return ParseAurinfoFromIterable(f, ecatcher) 170 | 171 | 172 | def ValidateAurinfo(filename='.AURINFO'): 173 | ecatcher = CollectionECatcher() 174 | ParseAurinfo(filename, ecatcher) 175 | errors = ecatcher.Errors() 176 | for error in errors: 177 | print('error on line %d: %s' % error, file=sys.stderr) 178 | return not errors 179 | 180 | 181 | if __name__ == '__main__': 182 | pp = pprint.PrettyPrinter(indent=4) 183 | 184 | if len(sys.argv) == 1: 185 | print('error: not enough arguments') 186 | sys.exit(1) 187 | elif len(sys.argv) == 2: 188 | action = sys.argv[1] 189 | filename = '.AURINFO' 190 | else: 191 | action, filename = sys.argv[1:3] 192 | 193 | if action == 
#!/usr/bin/python

# This is a parser for bash-style variable assignments.
# It fully (?) supports the syntax w/o braces, the ${}-syntax
# is implemented very hackish. (There are also detailed
# TODO-comments in various places)

# This is meant for Archlinux' AUR to be used to parse
# PKGBUILD's properly.

import re
# BUGFIX: sys was only imported inside __main__, so parseFile()'s
# error path raised NameError when this file was used as a module.
import sys

reName = re.compile(r"([\w_][\w\d_]*)")
reAssignment = re.compile(r"([\w_][\w\d_]*)=")
# BUGFIX: the class previously read [a-zA-Z0-0_], which excluded the
# digits 1-9 and truncated variable names like $v32 at the first of them.
reVar = re.compile(r"\$([a-zA-Z0-9_]+)")

def bashGlobToRegex(glob):
    """Translate a bash glob (see bash(1) "Pathname Expansion") into a
    Python regular expression string.

    Not implemented (never seen in practice): character/equivalence
    classes ([:upper:], [=c=], [.sym.]) and extended globs (?(p), *(p), ...).
    Backslash-escaped characters simply match themselves.
    """
    res = ""
    ptr = 0
    while ptr < len(glob):
        if glob[ptr] == "\\":
            # escaped character matches itself literally
            ptr += 1
            res += re.escape(glob[ptr])
        elif glob[ptr] == "*":
            res += ".*?"
        elif glob[ptr] == "?":
            res += "."
        elif glob[ptr] == "[":
            # bracket expression; '!' is bash's negation synonym for '^'
            ptr += 1
            res += "["
            if glob[ptr] == "^" or glob[ptr] == "!":
                res += "^"
                ptr += 1
            if glob[ptr] == "]":
                # a ']' directly after '[' / '[^' is a literal ']'
                res += "]"
                ptr += 1
            if glob[ptr-1] in "[^!" and glob[ptr] == "\\":
                ptr += 1
            while glob[ptr] != "]":
                # keep regex character-class escapes (\w, \d, ...) intact
                if glob[ptr] == "\\" and glob[ptr+1] in "wsdbaWSDBAZ":
                    res += "\\"
                res += glob[ptr]
                ptr += 1
            res += "]"
        else:
            res += re.escape(glob[ptr])
        ptr += 1
    return res

def expandParams(symbols, text):
    """Expand $name / ${name} / ${name/pattern/string} occurrences in *text*
    using the *symbols* dict (see bash(1) "Parameter Expansion").

    Unknown names expand to "".  Arrays used in string context are joined
    with spaces.  2nd-level indirection (${!foo}) is not supported.
    """
    res = ""
    ptr = 0
    while ptr < len(text):
        if text[ptr] == "\\":
            ptr += 1
        elif text[ptr] == "$":
            ptr += 1

            # are there braces?
            if text[ptr] == "{":
                ptr += 1
                name = reName.match(text, ptr)
                ptr = name.end()

                if text[ptr] == "/":
                    # Pattern substitution: ${parameter/pattern/string}
                    ptr += 1

                    # a second '/' means: substitute all occurrences
                    suball = False
                    if text[ptr] == "/":
                        suball = True
                        ptr += 1

                    # fetch the pattern
                    pattern = ""
                    while text[ptr] != "/" and text[ptr] != "}":
                        if text[ptr] == "\\" and text[ptr+1] in "\\}/":
                            ptr += 1
                        pattern += text[ptr]
                        ptr += 1

                    # fetch the substitute
                    substitute = ""
                    if text[ptr] != "}":  # there might be no substitute
                        ptr += 1
                        while text[ptr] != "}":
                            if text[ptr] == "\\":
                                ptr += 1
                            substitute += text[ptr]
                            ptr += 1

                    # '#' anchors at the start, '%' at the end
                    # BUGFIX: align was left unbound on the escaped-anchor
                    # branches, raising UnboundLocalError below.
                    align = 0
                    if pattern.startswith("#"):
                        align = 1
                        pattern = pattern[1:]
                    elif pattern.startswith("\\#"):
                        pattern = pattern[1:]
                    elif pattern.endswith("%%"):
                        align = 2
                        pattern = pattern[:-1]
                    elif pattern.endswith("\\%%"):
                        pattern = pattern[:-1]
                    regex = ((align == 1 and "^" or "") +
                             bashGlobToRegex(pattern) +
                             (align == 2 and "$" or ""))

                    res += re.sub(regex, substitute,
                                  symbols.get(name.group(1), ""),
                                  0 if suball else 1)
                else:
                    # 'normal' expansion
                    symb = symbols.get(name.group(1), "")
                    if isinstance(symb, list):
                        # workaround packagers not knowing the
                        # difference between string and array
                        symb = ' '.join(symb)
                    res += symb
            else:
                # NOTE(review): the shared ptr += 1 below skips one char
                # after the name; parseFile() pre-rewrites $var to ${var},
                # so this branch is effectively dead -- kept as-is.
                name = reName.match(text, ptr)
                res += symbols.get(name.group(1), "")
                ptr = name.end()
        else:
            res += text[ptr]
        ptr += 1
    return res

def parseStr(symbols, line, ptr):
    """Parse one 'word' of *line* starting at *ptr*.

    Handles single quotes (no expansion), double quotes and bare words
    (both expanded via expandParams).  Escapes are not parsed.
    Returns (new_ptr, value).
    """
    res = ""
    if line[ptr] == "'":
        ptr += 1
        while line[ptr] != "'":
            res += line[ptr]
            ptr += 1
        ptr += 1
    elif line[ptr] == '"':
        ptr += 1
        while line[ptr] != '"':
            res += line[ptr]
            ptr += 1
        res = expandParams(symbols, res)
        ptr += 1
    else:
        while (len(line) > ptr) and (not line[ptr] in " \t)"):
            res += line[ptr]
            ptr += 1
        res = expandParams(symbols, res)
    return ptr, res

def parseFile(fileh):
    """Parse variable assignments from the PKGBUILD-like file/iterable
    *fileh* and return a dict mapping names to strings or lists.

    Known limitation: multi-line strings using a trailing backslash
    (e.g. an embedded bash script) are not handled correctly.
    """
    lines = [""]

    for line in fileh:
        try:
            line = line.decode("UTF-8")
        except Exception:
            # already str (or undecodable) -- use as-is
            line = line
        # remove newlines
        line = line.rstrip("\r\n")
        if not line: continue
        lines[-1] += line
        if line[-1] == "\\":
            # trailing backslash: logical line continues
            lines[-1] = lines[-1].rstrip("\\")
        else:
            lines.append("")

    symbols = {}
    i = 0
    while i < len(lines):
        line = lines[i]
        # normalize $var to ${var} so expandParams only sees one form
        line = reVar.sub("${\g<1>}", line)
        i += 1

        assignment = reAssignment.match(line)
        if not assignment:
            continue
        ptr = assignment.end()

        # the parser relies on proper syntax; syntax errors are
        # caught here
        try:
            if line[ptr] == "(":
                # array assignment; may span several logical lines
                symbols[assignment.group(1)] = []
                ptr += 1
                while True:
                    while (ptr < len(line)) and (line[ptr] != ")"):
                        if not line[ptr] in " \t":
                            ptr, val = parseStr(symbols, line, ptr)
                            symbols[assignment.group(1)].append(val)
                        else:
                            ptr += 1
                    if (ptr < len(line)) and (line[ptr] == ")"): break
                    if len(lines) == i: break
                    line = lines[i]
                    i += 1
                    ptr = 0
            else:
                ptr, symbols[assignment.group(1)] = parseStr(
                    symbols, line, ptr)
        except IndexError:
            print("Syntax error, continuing", file=sys.stderr)
            continue
    return symbols

if __name__ == '__main__':
    class cooldict(dict):
        """dict wrapper that pretty-prints missing keys and arrays."""
        def __init__(self, foo):
            self.foo = foo
        def __getitem__(self, key):
            res = self.foo.get(key, "(missing)")
            if hasattr(res, 'sort'):
                res = ', '.join(res)
            return res

    if len(sys.argv) < 2:
        print("Usage: %s "%(sys.argv[0]))
        exit(1)

    res = cooldict(parseFile(
        open(sys.argv[1]) if sys.argv[1] != "-" else sys.stdin))
    print("""%(pkgname)s %(pkgver)s
%(pkgdesc)s

Licenses: %(license)s
Architectures: %(arch)s

Dependencies: %(depends)s
   for make: %(makedepends)s

Source: %(source)s"""%res)
from aurbs.helper import *

log = logging.getLogger('aurbs')

@Singleton
class Database(object):
	"""MongoDB-backed store for AUR package metadata and build results.

	Collections used: ``packages`` (one doc per AUR package, unique on
	``name``), ``builds`` and ``problems`` (results, indexed on ``name``),
	and ``info`` (a single builder-status document).
	"""

	def __init__(self):
		# Connect and optionally authenticate; configuration comes from the
		# AurBSConfig singleton.  Uses pymongo's legacy API
		# (authenticate/ensure_index/update/insert) throughout.
		self._client = MongoClient(AurBSConfig().database['host'], AurBSConfig().database['port'])
		self._db = self._client[AurBSConfig().database['database']]
		if AurBSConfig().database.get('user'):
			try:
				self._db.authenticate(AurBSConfig().database['user'], AurBSConfig().database['pass'])
				log.info("Authentificated with database user '%s'" % AurBSConfig().database['user'])
			except Exception as e:
				print(e)
				raise FatalError("Authentification failed with database user '%s'" % AurBSConfig().database['user'])
		else:
			# no credentials configured: probe whether the server allows
			# unauthenticated access at all
			try:
				self._db.collection_names()
			except Exception:
				raise FatalError("Database requires authentification")
		self._db.packages.ensure_index("name", unique=True, dropDups=True)
		for rtype in ['build', 'problem']:
			self._db["%ss" % rtype].ensure_index("name")

	def get_pkg(self, pkgname):
		"""Return the package document named *pkgname*; raise KeyError if absent."""
		pkg = self._db.packages.find_one({"name": pkgname})
		if pkg:
			return pkg
		else:
			raise KeyError("Package '%s' not found in database" % pkgname)

	def get_pkgbase(self, pkgname):
		"""Return the pkgbase document containing split package *pkgname*."""
		pkg = self._db.packages.find_one({"splitpkgs": pkgname})
		if pkg:
			return pkg
		else:
			raise KeyError("Package '%s' not found in database" % pkgname)

	def get_provider(self, pkgname):
		"""Return a package document that provides *pkgname*; KeyError if none."""
		pkg = self._db.packages.find_one({"provides": pkgname})
		if pkg:
			return pkg
		else:
			raise KeyError("Package '%s' not found in database" % pkgname)

	def get_all_provides(self):
		"""Return the set of all names provided by any stored package."""
		provides = []
		for pkg in self._db.packages.find({}):
			provides.extend(pkg['provides'])
		return set(provides)

	def sync_pkg(self, pkgname):
		"""Fetch *pkgname* from the AUR and (re)write its database document.

		Parses .AURINFO/.SRCINFO from the source tarball (falling back to
		the legacy PKGBUILD parser) to populate arch, depends, makedepends,
		provides and splitpkgs.
		"""
		# download the src pkg
		aur.sync(pkgname)

		# get some info via api
		pkg = aur.get_pkg(pkgname)

		# parse the .SRCINFO/.AURINFO file from the src pkg
		# using https://github.com/falconindy/pkgbuild-introspection/blob/master/test/aurinfo.py
		tar = tarfile.open(os.path.join(srcpkgdir, '%s.tar.gz' % pkgname))
		if '%s/.AURINFO' % pkg['pkgbase'] in tar.getnames():
			srcinfo = tar.extractfile('%s/.AURINFO' % pkg['pkgbase'])
		elif '%s/.SRCINFO' % pkg['pkgbase'] in tar.getnames():
			srcinfo = tar.extractfile('%s/.SRCINFO' % pkg['pkgbase'])
		else:
			# legacy mode: ancient pkg without .SRCINFO/.AURINFO
			# --> need to parse PKGBUILD
			log.info("Falling back to legacy PKGBUILD parser for pkg '%s'" % pkgname)
			pkgbuild = pkg_parser.parseFile(tar.extractfile('%s/PKGBUILD' % pkg['pkgbase']))
			#FIXME: legacy mode fails because there are no pkg['provides'] and pkg['splitpkgs']

		# NOTE: in legacy mode 'srcinfo' is never bound, so the NameError
		# below deliberately routes us to the parsed PKGBUILD instead.
		try:
			srcinfo = srcinfo.read().decode("UTF-8").split("\n")
			srcinfo = aurinfo.ParseAurinfoFromIterable(srcinfo, AurInfoEcatcher(pkgname, log))
			# handle splitpkgs
			pkg['splitpkgs'] = list(srcinfo.GetPackageNames())
			pkg['provides'] = list(srcinfo.GetPackageNames())
			srcinfo = [srcinfo.GetMergedPackage(splitpkg) for splitpkg in pkg['provides']]
		except NameError:
			srcinfo = [pkgbuild]

		pkg['arch'] = srcinfo[0]['arch']
		pkg['depends'] = []
		pkg['makedepends'] = []

		# aggregate dependencies over all split packages, stripped of
		# version constraints
		for splitpkg in srcinfo:
			pkg['depends'].extend(clean_dep_ver(splitpkg.get('depends', [])))
			pkg['makedepends'].extend(clean_dep_ver(splitpkg.get('makedepends', [])))
			pkg['provides'].extend(clean_dep_ver(splitpkg.get('provides', [])))

		# drop duplicate entries
		pkg['depends'] = list(set(pkg['depends']))
		pkg['makedepends'] = list(set(pkg['makedepends']))
		pkg['provides'] = list(set(pkg['provides']))

		# filter loop deps pointing to provides of this pkg
		pkg['depends'] = list(filter(lambda d: d not in pkg['provides'], pkg['depends']))
		pkg['makedepends'] = list(filter(lambda d: d not in pkg['provides'], pkg['makedepends']))

		# upsert: update in place, insert when no document matched
		if not self._db.packages.update({"name": pkgname}, {"$set": pkg})['n']:
			self._db.packages.insert(pkg)

	def update_pkg(self, pkgname, pkg):
		"""Merge the fields of *pkg* into the stored document for *pkgname*."""
		self._db.packages.update({"name": pkgname}, {"$set": pkg})

	def _cleanup_results(self, pkgname):
		"""Drop results whose arch class no longer matches the package
		(e.g. stale 'any' results for an arch-specific package)."""
		# cleanup non-matching results (any-results for i686-x86_64 pkg)
		try:
			pkg = self.get_pkg(pkgname)
			any_arch = True if pkg['arch'][0] == "any" else False
			for rtype in ['build', 'problem']:
				for result in self._db["%ss" % rtype].find({'name': pkgname}):
					if any_arch and result['arch'] == 'any':
						continue
					elif not any_arch and result['arch'] != 'any':
						continue
					self._db["%ss" % rtype].remove({'_id': result['_id']})
		except KeyError:
			# package unknown: nothing to reconcile
			pass

	def set_result(self, pkgname, build_arch, rtype, **kwargs):
		"""Record a build or problem result for *pkgname* on *build_arch*.

		rtype is 'build' (kwargs: linkdepends, release) or 'problem'
		(kwargs: ptype plus depends or linkdepends depending on ptype).
		Overwrites an existing result for the same arch.
		"""
		# arch via pkg from db
		# rtype = problem | build
		self._cleanup_results(pkgname)
		try:
			pkg = self.get_pkg(pkgname)
			arch = "any" if pkg['arch'][0] == "any" else build_arch
		except KeyError:
			# package not synced yet: fall back to a placeholder document
			pkg = dummy.aurpkg(pkgname)
			arch = build_arch
		setv = {
			"name": pkg['name'],
			"arch": arch,
			"build_arch": build_arch,
			"date": datetime.datetime.utcnow(),
			"pkg": pkg['_id']
		}
		if rtype == 'build':
			setv.update({
				"linkdepends": kwargs['linkdepends'],
				"release": kwargs['release'],
				"version": pkg['version']
			})
		elif rtype == 'problem':
			setv.update({
				"type": kwargs['ptype']
			})
			if kwargs['ptype'] in ['blocked_depends', 'missing_depends']:
				setv.update({
					"depends": kwargs['depends']
				})
			elif kwargs['ptype'] == 'fail':
				setv.update({
					"linkdepends": kwargs['linkdepends'],
					"version": pkg['version']
				})
		try:
			setr = self.get_result(pkgname, build_arch=build_arch, rtype=rtype)
			self._db["%ss" % rtype].update({'_id': setr['_id']}, setv)
		except KeyError:
			setr = self._db["%ss" % rtype].insert(setv)

	def get_result(self, pkgname, build_arch=None, arch=None, rtype=None):
		"""Return a result for *pkgname*.

		With *rtype* given: return that result or raise KeyError.
		Without *rtype*: return {'rtype': ..., 'rvalue': ...} checking
		problems before builds, or None when neither exists.
		"""
		# rtype = problem | build
		if not arch:
			try:
				pkg = self.get_pkg(pkgname)
				# arch via pkg from db
				arch = "any" if pkg['arch'][0] == "any" else build_arch
			except KeyError:
				arch = build_arch
		if rtype:
			rvalue = self._db["%ss" % rtype].find_one({'name': pkgname, 'arch': arch})
			if not rvalue:
				raise KeyError("No %s result for %s-%s" % (rtype, pkgname, arch))
			return rvalue
		else:
			for rtype in ['problem', 'build']:
				rvalue = self._db["%ss" % rtype].find_one({'name': pkgname, 'arch': arch})
				if rvalue:
					return {"rtype": rtype, "rvalue": rvalue}
			return None

	def get_results(self, rtype, **query):
		"""Return a cursor over all *rtype* results matching *query*."""
		return self._db["%ss" % rtype].find(query)

	def delete_result(self, pkgname, build_arch, rtype):
		"""Delete the *rtype* result of *pkgname* for the effective arch, if any."""
		# arch via pkg from db
		# rtype = problem | build
		self._cleanup_results(pkgname)
		try:
			pkg = self.get_pkg(pkgname)
			arch = "any" if pkg['arch'][0] == "any" else build_arch
		except KeyError:
			arch = build_arch
		rvalue = self._db["%ss" % rtype].find_one({'name': pkgname, 'arch': arch})
		if rvalue:
			self._db["%ss" % rtype].remove({'_id': rvalue['_id']})

	def set_status(self, scheduled, done, arch, building=None):
		"""Store the current builder status (single 'status' doc in 'info')."""
		count = len(scheduled) + len(done)
		count += 1 if building else 0
		status = {
			"type": "status",
			"scheduled": scheduled,
			"done": done,
			"building": building,
			"arch": arch,
			"count": count
		}
		try:
			sid = self._db.info.find_one({'type': 'status'})['_id']
			self._db.info.update({'_id': sid}, status)
		except Exception:
			# no status document yet (find_one returned None)
			self._db.info.insert(status)

	def get_status(self):
		"""Return the stored builder status without its Mongo _id."""
		status = self._db.info.find_one({'type': 'status'})
		status.pop("_id")
		return status

	def get_pkg_required_by(self, pkgname):
		"""Return configured AUR packages whose depends intersect what
		*pkgname* provides."""
		provides = set(self.get_pkg(pkgname)['provides'])
		pkgs = []
		for pkg in AurBSConfig().aurpkgs:
			try:
				pkg = self.get_pkg(pkg)
				if provides.intersection(pkg['depends']):
					pkgs.append(pkg['name'])
			except KeyError:
				# configured package not yet in the database
				pass
		return pkgs

	def cleanup_orphaned(self):
		"""Remove packages and results no longer listed in the configuration."""
		# cleanup pkgs and results, that are not in AurBSConfig().aurpkgs
		for pkg in self._db.packages.find({}):
			if pkg['name'] not in AurBSConfig().aurpkgs:
				self._db.packages.remove({'name': pkg['name']})
				log.info("Cleanup orphaned db pkg-entry: %s" % pkg['name'])
		for rtype in ['build', 'problem']:
			for result in self.get_results(rtype):
				if result['name'] not in AurBSConfig().aurpkgs:
					self._db["%ss" % rtype].remove({'name': result['name']})
					log.info("Cleanup orphaned db result-entry: %s" % result['name'])

	def filter_dependencies(self, args, local=True, nofilter=False):
		"""Union the dependency iterables in *args* and split them.

		local=True returns deps satisfiable by AurBS-built packages,
		local=False the rest; nofilter=True returns the raw union set.
		"""
		deps = set()
		for arg in args:
			deps = deps.union(arg)
		if nofilter:
			return deps
		pkgs = set(AurBSConfig().aurpkgs)
		pkgs = pkgs.union(self.get_all_provides())
		if local:
			return [d for d in deps if d in pkgs]
		else:
			return [d for d in deps if d not in pkgs]
9 | */.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}@-ms-viewport{width:device-width}.hidden{display:none;visibility:hidden}.visible-phone{display:none!important}.visible-tablet{display:none!important}.hidden-desktop{display:none!important}.visible-desktop{display:inherit!important}@media(min-width:768px) and (max-width:979px){.hidden-desktop{display:inherit!important}.visible-desktop{display:none!important}.visible-tablet{display:inherit!important}.hidden-tablet{display:none!important}}@media(max-width:767px){.hidden-desktop{display:inherit!important}.visible-desktop{display:none!important}.visible-phone{display:inherit!important}.hidden-phone{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:inherit!important}.hidden-print{display:none!important}}@media(min-width:1200px){.row{margin-left:-30px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;min-height:1px;margin-left:30px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom 
.container{width:1170px}.span12{width:1170px}.span11{width:1070px}.span10{width:970px}.span9{width:870px}.span8{width:770px}.span7{width:670px}.span6{width:570px}.span5{width:470px}.span4{width:370px}.span3{width:270px}.span2{width:170px}.span1{width:70px}.offset12{margin-left:1230px}.offset11{margin-left:1130px}.offset10{margin-left:1030px}.offset9{margin-left:930px}.offset8{margin-left:830px}.offset7{margin-left:730px}.offset6{margin-left:630px}.offset5{margin-left:530px}.offset4{margin-left:430px}.offset3{margin-left:330px}.offset2{margin-left:230px}.offset1{margin-left:130px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.564102564102564%;*margin-left:2.5109110747408616%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .controls-row [class*="span"]+[class*="span"]{margin-left:2.564102564102564%}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid .span11{width:91.45299145299145%;*width:91.39979996362975%}.row-fluid .span10{width:82.90598290598291%;*width:82.8527914166212%}.row-fluid .span9{width:74.35897435897436%;*width:74.30578286961266%}.row-fluid .span8{width:65.81196581196582%;*width:65.75877432260411%}.row-fluid .span7{width:57.26495726495726%;*width:57.21176577559556%}.row-fluid .span6{width:48.717948717948715%;*width:48.664757228587014%}.row-fluid .span5{width:40.17094017094017%;*width:40.11774868157847%}.row-fluid .span4{width:31.623931623931625%;*width:31.570740134569924%}.row-fluid .span3{width:23.076923076923077%;*width:23.023731587561375%}.row-fluid .span2{width:14.52991452991453%;*width:14.476723040552828%}.row-fluid .span1{width:5.982905982905983%;*width:5.929714493544281%}.row-fluid 
.offset12{margin-left:105.12820512820512%;*margin-left:105.02182214948171%}.row-fluid .offset12:first-child{margin-left:102.56410256410257%;*margin-left:102.45771958537915%}.row-fluid .offset11{margin-left:96.58119658119658%;*margin-left:96.47481360247316%}.row-fluid .offset11:first-child{margin-left:94.01709401709402%;*margin-left:93.91071103837061%}.row-fluid .offset10{margin-left:88.03418803418803%;*margin-left:87.92780505546462%}.row-fluid .offset10:first-child{margin-left:85.47008547008548%;*margin-left:85.36370249136206%}.row-fluid .offset9{margin-left:79.48717948717949%;*margin-left:79.38079650845607%}.row-fluid .offset9:first-child{margin-left:76.92307692307693%;*margin-left:76.81669394435352%}.row-fluid .offset8{margin-left:70.94017094017094%;*margin-left:70.83378796144753%}.row-fluid .offset8:first-child{margin-left:68.37606837606839%;*margin-left:68.26968539734497%}.row-fluid .offset7{margin-left:62.393162393162385%;*margin-left:62.28677941443899%}.row-fluid .offset7:first-child{margin-left:59.82905982905982%;*margin-left:59.72267685033642%}.row-fluid .offset6{margin-left:53.84615384615384%;*margin-left:53.739770867430444%}.row-fluid .offset6:first-child{margin-left:51.28205128205128%;*margin-left:51.175668303327875%}.row-fluid .offset5{margin-left:45.299145299145295%;*margin-left:45.1927623204219%}.row-fluid .offset5:first-child{margin-left:42.73504273504273%;*margin-left:42.62865975631933%}.row-fluid .offset4{margin-left:36.75213675213675%;*margin-left:36.645753773413354%}.row-fluid .offset4:first-child{margin-left:34.18803418803419%;*margin-left:34.081651209310785%}.row-fluid .offset3{margin-left:28.205128205128204%;*margin-left:28.0987452264048%}.row-fluid .offset3:first-child{margin-left:25.641025641025642%;*margin-left:25.53464266230224%}.row-fluid .offset2{margin-left:19.65811965811966%;*margin-left:19.551736679396257%}.row-fluid .offset2:first-child{margin-left:17.094017094017094%;*margin-left:16.98763411529369%}.row-fluid 
.offset1{margin-left:11.11111111111111%;*margin-left:11.004728132387708%}.row-fluid .offset1:first-child{margin-left:8.547008547008547%;*margin-left:8.440625568285142%}input,textarea,.uneditable-input{margin-left:0}.controls-row [class*="span"]+[class*="span"]{margin-left:30px}input.span12,textarea.span12,.uneditable-input.span12{width:1156px}input.span11,textarea.span11,.uneditable-input.span11{width:1056px}input.span10,textarea.span10,.uneditable-input.span10{width:956px}input.span9,textarea.span9,.uneditable-input.span9{width:856px}input.span8,textarea.span8,.uneditable-input.span8{width:756px}input.span7,textarea.span7,.uneditable-input.span7{width:656px}input.span6,textarea.span6,.uneditable-input.span6{width:556px}input.span5,textarea.span5,.uneditable-input.span5{width:456px}input.span4,textarea.span4,.uneditable-input.span4{width:356px}input.span3,textarea.span3,.uneditable-input.span3{width:256px}input.span2,textarea.span2,.uneditable-input.span2{width:156px}input.span1,textarea.span1,.uneditable-input.span1{width:56px}.thumbnails{margin-left:-30px}.thumbnails>li{margin-left:30px}.row-fluid .thumbnails{margin-left:0}}@media(min-width:768px) and (max-width:979px){.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;min-height:1px;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom 
.container{width:724px}.span12{width:724px}.span11{width:662px}.span10{width:600px}.span9{width:538px}.span8{width:476px}.span7{width:414px}.span6{width:352px}.span5{width:290px}.span4{width:228px}.span3{width:166px}.span2{width:104px}.span1{width:42px}.offset12{margin-left:764px}.offset11{margin-left:702px}.offset10{margin-left:640px}.offset9{margin-left:578px}.offset8{margin-left:516px}.offset7{margin-left:454px}.offset6{margin-left:392px}.offset5{margin-left:330px}.offset4{margin-left:268px}.offset3{margin-left:206px}.offset2{margin-left:144px}.offset1{margin-left:82px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.7624309392265194%;*margin-left:2.709239449864817%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .controls-row [class*="span"]+[class*="span"]{margin-left:2.7624309392265194%}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid .span11{width:91.43646408839778%;*width:91.38327259903608%}.row-fluid .span10{width:82.87292817679558%;*width:82.81973668743387%}.row-fluid .span9{width:74.30939226519337%;*width:74.25620077583166%}.row-fluid .span8{width:65.74585635359117%;*width:65.69266486422946%}.row-fluid .span7{width:57.18232044198895%;*width:57.12912895262725%}.row-fluid .span6{width:48.61878453038674%;*width:48.56559304102504%}.row-fluid .span5{width:40.05524861878453%;*width:40.00205712942283%}.row-fluid .span4{width:31.491712707182323%;*width:31.43852121782062%}.row-fluid .span3{width:22.92817679558011%;*width:22.87498530621841%}.row-fluid .span2{width:14.3646408839779%;*width:14.311449394616199%}.row-fluid .span1{width:5.801104972375691%;*width:5.747913483013988%}.row-fluid .offset12{margin-left:105.52486187845304%;*margin-left:105.41847889972962%}.row-fluid 
.offset12:first-child{margin-left:102.76243093922652%;*margin-left:102.6560479605031%}.row-fluid .offset11{margin-left:96.96132596685082%;*margin-left:96.8549429881274%}.row-fluid .offset11:first-child{margin-left:94.1988950276243%;*margin-left:94.09251204890089%}.row-fluid .offset10{margin-left:88.39779005524862%;*margin-left:88.2914070765252%}.row-fluid .offset10:first-child{margin-left:85.6353591160221%;*margin-left:85.52897613729868%}.row-fluid .offset9{margin-left:79.8342541436464%;*margin-left:79.72787116492299%}.row-fluid .offset9:first-child{margin-left:77.07182320441989%;*margin-left:76.96544022569647%}.row-fluid .offset8{margin-left:71.2707182320442%;*margin-left:71.16433525332079%}.row-fluid .offset8:first-child{margin-left:68.50828729281768%;*margin-left:68.40190431409427%}.row-fluid .offset7{margin-left:62.70718232044199%;*margin-left:62.600799341718584%}.row-fluid .offset7:first-child{margin-left:59.94475138121547%;*margin-left:59.838368402492065%}.row-fluid .offset6{margin-left:54.14364640883978%;*margin-left:54.037263430116376%}.row-fluid .offset6:first-child{margin-left:51.38121546961326%;*margin-left:51.27483249088986%}.row-fluid .offset5{margin-left:45.58011049723757%;*margin-left:45.47372751851417%}.row-fluid .offset5:first-child{margin-left:42.81767955801105%;*margin-left:42.71129657928765%}.row-fluid .offset4{margin-left:37.01657458563536%;*margin-left:36.91019160691196%}.row-fluid .offset4:first-child{margin-left:34.25414364640884%;*margin-left:34.14776066768544%}.row-fluid .offset3{margin-left:28.45303867403315%;*margin-left:28.346655695309746%}.row-fluid .offset3:first-child{margin-left:25.69060773480663%;*margin-left:25.584224756083227%}.row-fluid .offset2{margin-left:19.88950276243094%;*margin-left:19.783119783707537%}.row-fluid .offset2:first-child{margin-left:17.12707182320442%;*margin-left:17.02068884448102%}.row-fluid .offset1{margin-left:11.32596685082873%;*margin-left:11.219583872105325%}.row-fluid 
.offset1:first-child{margin-left:8.56353591160221%;*margin-left:8.457152932878806%}input,textarea,.uneditable-input{margin-left:0}.controls-row [class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:710px}input.span11,textarea.span11,.uneditable-input.span11{width:648px}input.span10,textarea.span10,.uneditable-input.span10{width:586px}input.span9,textarea.span9,.uneditable-input.span9{width:524px}input.span8,textarea.span8,.uneditable-input.span8{width:462px}input.span7,textarea.span7,.uneditable-input.span7{width:400px}input.span6,textarea.span6,.uneditable-input.span6{width:338px}input.span5,textarea.span5,.uneditable-input.span5{width:276px}input.span4,textarea.span4,.uneditable-input.span4{width:214px}input.span3,textarea.span3,.uneditable-input.span3{width:152px}input.span2,textarea.span2,.uneditable-input.span2{width:90px}input.span1,textarea.span1,.uneditable-input.span1{width:28px}}@media(max-width:767px){body{padding-right:20px;padding-left:20px}.navbar-fixed-top,.navbar-fixed-bottom,.navbar-static-top{margin-right:-20px;margin-left:-20px}.container-fluid{padding:0}.dl-horizontal dt{float:none;width:auto;clear:none;text-align:left}.dl-horizontal dd{margin-left:0}.container{width:auto}.row-fluid{width:100%}.row,.thumbnails{margin-left:0}.thumbnails>li{float:none;margin-left:0}[class*="span"],.uneditable-input[class*="span"],.row-fluid [class*="span"]{display:block;float:none;width:100%;margin-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.span12,.row-fluid .span12{width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="offset"]:first-child{margin-left:0}.input-large,.input-xlarge,.input-xxlarge,input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.input-prepend 
input,.input-append input,.input-prepend input[class*="span"],.input-append input[class*="span"]{display:inline-block;width:auto}.controls-row [class*="span"]+[class*="span"]{margin-left:0}.modal{position:fixed;top:20px;right:20px;left:20px;width:auto;margin:0}.modal.fade{top:-100px}.modal.fade.in{top:20px}}@media(max-width:480px){.nav-collapse{-webkit-transform:translate3d(0,0,0)}.page-header h1 small{display:block;line-height:20px}input[type="checkbox"],input[type="radio"]{border:1px solid #ccc}.form-horizontal .control-label{float:none;width:auto;padding-top:0;text-align:left}.form-horizontal .controls{margin-left:0}.form-horizontal .control-list{padding-top:0}.form-horizontal .form-actions{padding-right:10px;padding-left:10px}.media .pull-left,.media .pull-right{display:block;float:none;margin-bottom:10px}.media-object{margin-right:0;margin-left:0}.modal{top:10px;right:10px;left:10px}.modal-header .close{padding:10px;margin:-10px}.carousel-caption{position:static}}@media(max-width:979px){body{padding-top:0}.navbar-fixed-top,.navbar-fixed-bottom{position:static}.navbar-fixed-top{margin-bottom:20px}.navbar-fixed-bottom{margin-top:20px}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding:5px}.navbar .container{width:auto;padding:0}.navbar .brand{padding-right:10px;padding-left:10px;margin:0 0 0 -5px}.nav-collapse{clear:both}.nav-collapse .nav{float:none;margin:0 0 10px}.nav-collapse .nav>li{float:none}.nav-collapse .nav>li>a{margin-bottom:2px}.nav-collapse .nav>.divider-vertical{display:none}.nav-collapse .nav .nav-header{color:#777;text-shadow:none}.nav-collapse .nav>li>a,.nav-collapse .dropdown-menu a{padding:9px 15px;font-weight:bold;color:#777;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.nav-collapse .btn{padding:4px 10px 4px;font-weight:normal;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.nav-collapse .dropdown-menu li+li a{margin-bottom:2px}.nav-collapse .nav>li>a:hover,.nav-collapse 
.nav>li>a:focus,.nav-collapse .dropdown-menu a:hover,.nav-collapse .dropdown-menu a:focus{background-color:#f2f2f2}.navbar-inverse .nav-collapse .nav>li>a,.navbar-inverse .nav-collapse .dropdown-menu a{color:#999}.navbar-inverse .nav-collapse .nav>li>a:hover,.navbar-inverse .nav-collapse .nav>li>a:focus,.navbar-inverse .nav-collapse .dropdown-menu a:hover,.navbar-inverse .nav-collapse .dropdown-menu a:focus{background-color:#111}.nav-collapse.in .btn-group{padding:0;margin-top:5px}.nav-collapse .dropdown-menu{position:static;top:auto;left:auto;display:none;float:none;max-width:none;padding:0;margin:0 15px;background-color:transparent;border:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.nav-collapse .open>.dropdown-menu{display:block}.nav-collapse .dropdown-menu:before,.nav-collapse .dropdown-menu:after{display:none}.nav-collapse .dropdown-menu .divider{display:none}.nav-collapse .nav>li>.dropdown-menu:before,.nav-collapse .nav>li>.dropdown-menu:after{display:none}.nav-collapse .navbar-form,.nav-collapse .navbar-search{float:none;padding:10px 15px;margin:10px 0;border-top:1px solid #f2f2f2;border-bottom:1px solid #f2f2f2;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1)}.navbar-inverse .nav-collapse .navbar-form,.navbar-inverse .nav-collapse .navbar-search{border-top-color:#111;border-bottom-color:#111}.navbar .nav-collapse .nav.pull-right{float:none;margin-left:0}.nav-collapse,.nav-collapse.collapse{height:0;overflow:hidden}.navbar .btn-navbar{display:block}.navbar-static .navbar-inner{padding-right:10px;padding-left:10px}}@media(min-width:980px){.nav-collapse.collapse{height:auto!important;overflow:visible!important}} 10 | 
-------------------------------------------------------------------------------- /bin/aurbs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import sys 4 | import os 5 | import shutil 6 | import subprocess 7 | import logging 8 | import logging.handlers 9 | import argparse 10 | import signal 11 | from pwd import getpwnam 12 | from contextlib import suppress 13 | 14 | from aurbs import aur 15 | from aurbs.config import AurBSConfig 16 | from aurbs.db import Database 17 | from aurbs.webserver import WebServer 18 | from aurbs.model import Dependency, FatalError 19 | from aurbs import subproc 20 | 21 | from aurbs.static import * 22 | from aurbs.helper import * 23 | from aurbs.model import * 24 | 25 | def signal_handler(signal, frame): 26 | sys.exit(128 + signal) 27 | 28 | signal.signal(signal.SIGINT, signal_handler) 29 | signal.signal(signal.SIGTERM, signal_handler) 30 | 31 | parser = argparse.ArgumentParser(description='AUR Build Service') 32 | parser.add_argument('pkg', nargs='?', help='Package to build') 33 | parser.add_argument('--syslog', action='store_true', help='Log to syslog') 34 | parser.add_argument('-c', '--config', default='/etc/aurbs.yml', help='Set alternative config file') 35 | parser.add_argument('-C', '--cache', action='store_true', help='Use cached pkg sources') 36 | parser.add_argument('-v', '--verbose', action='store_true', help='Set log to DEBUG') 37 | parser.add_argument('-s', '--strict', action='store_true', help='Exit on build failures') 38 | parser.add_argument('-f', '--force', action='store_true', help='Force rebuild') 39 | parser.add_argument('-F', '--forceall', action='store_true', help='Force rebuild also dependencies') 40 | parser.add_argument('-S', '--forcesync', action='store_true', help='Force resync all packages') 41 | parser.add_argument('-a', '--arch', help='Set build architecture') 42 | args = parser.parse_args() 43 | 44 | 45 | log = logging.getLogger('aurbs') 46 | 
loglevel = logging.DEBUG if args.verbose else logging.INFO 47 | log.setLevel(loglevel) 48 | if args.syslog: 49 | loghandler = logging.handlers.SysLogHandler(address = '/dev/log') 50 | else: 51 | loghandler = logging.StreamHandler(sys.stdout) 52 | formatter = logging.Formatter('[%(asctime)s] %(levelname)+8s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') 53 | loghandler.setFormatter(formatter) 54 | log.addHandler(loghandler) 55 | 56 | 57 | def publish_pkg(pkgbase, pkgname, arch, version): 58 | pkg = db.get_pkg(pkgbase) 59 | arch_publish = 'any' if pkg['arch'][0] == 'any' else arch 60 | filename = '%s-%s-%s.pkg.tar.xz' % (pkgname, version, arch_publish) 61 | repo_archs = AurBSConfig().architectures if arch_publish == 'any' else [arch] 62 | 63 | # Delete old file from repo and repodb 64 | for repo_arch in repo_archs: 65 | for item in find_pkg_files(pkgname, directory=repodir(repo_arch)): 66 | [ipkgname, ipkgver, ipkgrel, iarch] = item.rsplit("-", 3) 67 | log.debug("Removing '%s' from %s repo db" % (ipkgname, repo_arch)) 68 | with suppress(OSError): 69 | subproc.ccall(['repo-remove', 'aurstaging.db.tar.gz', ipkgname], cwd=repodir(repo_arch)) 70 | os.remove(os.path.join(repodir(repo_arch), item)) 71 | for item in find_pkg_files(pkgname, directory=repodir('any')): 72 | os.remove(os.path.join(repodir('any'), item)) 73 | 74 | # Prevent old pkg being cached 75 | if os.path.isfile(os.path.join(cachedir, filename)): 76 | os.remove(os.path.join(cachedir, filename)) 77 | 78 | shutil.copyfile(os.path.join(build_dir(arch), pkgbase, filename), os.path.join(repodir(arch_publish), filename)) 79 | for repo_arch in repo_archs: 80 | if arch_publish == 'any': 81 | os.symlink(os.path.join('..', arch_publish, filename), os.path.join(repodir(repo_arch), filename)) 82 | log.debug("Adding '%s' to %s repo db" % (filename, repo_arch)) 83 | subproc.ccall(['repo-add', 'aurstaging.db.tar.gz', filename], cwd=repodir(repo_arch)) 84 | 85 | def make_pkg(pkgname, arch): 86 | pkg = db.get_pkg(pkgname) 87 | 
88 | # Remember dependencies 89 | deps = [] 90 | for dep in db.filter_dependencies([pkg['depends'], pkg['makedepends']], local=False): 91 | try: 92 | deps.append({'name': dep, 'version': remote_pkgver(dep, arch)}) 93 | except KeyError: 94 | log.error("Build: Dependency '%s' for '%s' not found!" % (dep, pkgname)) 95 | for dep in db.filter_dependencies([pkg['depends'], pkg['makedepends']], local=True): 96 | with suppress(KeyError): 97 | dep = db.get_provider(dep)['name'] 98 | try: 99 | dep_build = db.get_result(dep, build_arch=arch, rtype='build') 100 | deps.append({'name': dep, 'version': dep_build['version'], 'release': dep_build['release']}) 101 | except KeyError: 102 | log.error("Build: Dependency '%s' for '%s' not found!" % (dep, pkgname)) 103 | 104 | # Compute new release 105 | try: 106 | build = db.get_result(pkgname, build_arch=arch, rtype='build') 107 | if version_newer(build['version'], pkg['version']): 108 | release = 1 109 | else: 110 | release = build['release'] + 1 111 | except KeyError: 112 | release = 1 113 | 114 | log.warning("Building pkg: %s" % pkgname) 115 | 116 | build_dir_pkg = os.path.join(build_dir(arch), pkgname) 117 | src_pkg = os.path.join('/var/cache/aurbs/srcpkgs', '%s.tar.gz' % pkgname) 118 | 119 | # Create the directory to prevent pkgs exploiting other pkgs (tarbombs) 120 | try: 121 | os.mkdir(build_dir_pkg) 122 | except FileExistsError: 123 | # delete old output pkgs 124 | for filename in find_pkg_files(pkgname, directory=build_dir_pkg): 125 | os.remove(os.path.join(build_dir_pkg, filename)) 126 | # delete old logfiles 127 | for filename in os.listdir(build_dir_pkg): 128 | if filename.endswith('.log'): 129 | os.remove(os.path.join(build_dir_pkg, filename)) 130 | if not args.cache: 131 | subproc.ccall(['bsdtar', '--uname', 'aurbs', '--gname', 'daemon', '--strip-components', '1', '-xvf', src_pkg], cwd=build_dir_pkg, 132 | stdout=open("/dev/null", 'w'), 133 | stderr=subprocess.STDOUT, 134 | interrupt=signal.SIGKILL 135 | ) 136 | 137 | # 
Hack to fix bad pkgs having 600/700 dependencies 138 | set_chmod(build_dir_pkg, dirs=0o755, files=0o644) 139 | 140 | # Inject release into pkgrel in PKGBUILD 141 | # this is needed to allow clients to track rebuilds 142 | pkgrel = '%s.%i' % (pkg['version'].rsplit("-", 1)[1], release) 143 | with open(os.path.join(build_dir_pkg, 'PKGBUILD'), 'a') as PKGBUILD: 144 | PKGBUILD.write("\n# Injected by aurbs to track rebuilds\n") 145 | PKGBUILD.write("pkgrel=%s\n" % pkgrel) 146 | 147 | # create fresh gpg directory 148 | gpgdir = "/var/cache/aurbs/.gnupg" 149 | shutil.rmtree(gpgdir, ignore_errors=True) 150 | os.mkdir(gpgdir) 151 | os.chown(gpgdir, getpwnam('aurbs').pw_uid, getpwnam('aurbs').pw_gid) 152 | shutil.copy("/usr/share/aurbs/cfg/gpg.conf", os.path.join(gpgdir, "gpg.conf")) 153 | 154 | # hack to allow makepkg to download sources as aurbs user 155 | os.chown(build_dir_pkg, getpwnam('aurbs').pw_uid, getpwnam('aurbs').pw_gid) 156 | 157 | build_logfile = os.path.join(build_dir_pkg, "makepkg.log") 158 | 159 | try: 160 | env = os.environ.copy() 161 | env["SUDO_UID"] = str(getpwnam('aurbs').pw_uid) # for running makepkg in the chroot 162 | env["SUDO_USER"] = "aurbs" # for downloading the sources (outside chroot) 163 | subproc.ccall([ 164 | 'makechrootpkg', '-cu', '-l', 'build', 165 | '-d', "%s:/build/.ccache" % ccache_dir(arch), 166 | '-d', "/var/cache/aurbs/.gnupg:/build/.gnupg", 167 | '-r', chroot(arch), 168 | '--', '--noprogressbar'], 169 | cwd=build_dir_pkg, 170 | env=env, 171 | stdout=open(build_logfile, 'w'), 172 | stderr=subprocess.STDOUT, 173 | interrupt=signal.SIGINT, 174 | int_active_child=True 175 | ) 176 | for item in find_pkg_files(pkg.get('splitpkgs', pkgname), directory=build_dir_pkg): 177 | [ipkgname, ipkgver, ipkgrel, iarch] = item.rsplit("-", 3) 178 | log.info("Publishing pkg '%s'" % item) 179 | ver_publish = '%s-%s' % (ipkgver, ipkgrel) 180 | publish_pkg(pkgname, ipkgname, arch, ver_publish) 181 | # Cleanup built pkg 182 | 
os.remove(os.path.join(build_dir_pkg, item)) 183 | db.set_result(pkgname, arch, 'build', linkdepends=deps, release=release) 184 | log.warning("Done building '%s'" % pkgname) 185 | return True 186 | except Exception as e: 187 | log.warning("Error: %s" % e) 188 | log.error("Failed building '%s' - for details see %s" % (pkgname, build_logfile)) 189 | db.set_result(pkgname, arch, 'problem', ptype='fail', linkdepends=deps) 190 | if args.strict: 191 | raise FatalError('Build Failure') 192 | return False 193 | 194 | def check_pkg(pkgname, arch, do_build=False): 195 | if pkgname in pkg_checked: 196 | return pkg_checked[pkgname] 197 | 198 | # set status 199 | if batch_mode: 200 | pkgs_scheduled = [e for e in filter(lambda i: i not in pkg_checked, AurBSConfig().aurpkgs)] 201 | pkgs_scheduled.remove(pkgname) 202 | pkgs_done = [e for e in pkg_checked.keys()] 203 | else: 204 | pkgs_scheduled = [] 205 | pkgs_done = [] 206 | db.set_status(pkgs_scheduled, pkgs_done, building=pkgname, arch=arch) 207 | 208 | build_blocked = False 209 | problem_depends = [] 210 | 211 | log.debug("Checking local pkg: %s" % pkgname) 212 | pkg_local = db.get_pkg(pkgname) 213 | 214 | # Check against previous build 215 | try: 216 | pkg_build = db.get_result(pkgname, build_arch=arch, rtype='build') 217 | # Check version changed 218 | if version_newer(pkg_build['version'], pkg_local['version']): 219 | log.warning("AUR-PKG '%s' outdated build --> rebuilding" % pkgname) 220 | do_build = True 221 | except KeyError: 222 | log.warning("No build for AUR-PKG '%s' --> building" % pkgname) 223 | do_build = True 224 | pkg_build = False 225 | 226 | # Check for remote dependency updates and missing remote deps 227 | remote_deps = db.filter_dependencies([pkg_local['depends']], local=False) 228 | for dep in remote_deps: 229 | try: 230 | ver_remote = remote_pkgver(dep, arch) 231 | except KeyError: 232 | log.error("Dependency '%s' for '%s' not found! Build blocked." 
% (dep, pkgname)) 233 | build_blocked = True 234 | problem_depends.append(dep) 235 | continue 236 | if pkg_build and not do_build: 237 | try: 238 | ver_local = by_name(pkg_build['linkdepends'], dep)['version'] 239 | if version_newer(ver_local, ver_remote): 240 | log.warning("Remote Dependency '%s' of AUR-PKG '%s' updated (%s -> %s) --> rebuilding" % (dep, pkgname, ver_local, ver_remote)) 241 | do_build = True 242 | except (KeyError, StopIteration): 243 | # This only happens if a packager added a dependency 244 | # without increasing the pkgrel 245 | log.warning("Remote Dependency '%s' of AUR-PKG '%s' added (%s) --> rebuilding" % (dep, pkgname, ver_remote)) 246 | do_build = True 247 | 248 | if build_blocked: 249 | db.set_result(pkgname, arch, 'problem', ptype='missing_depends', depends=problem_depends) 250 | if args.strict: 251 | raise FatalError("Dependencies for pkg '%s' missing: %s" % (pkgname, ', '.join(problem_depends))) 252 | pkg_checked[pkgname] = Dependency.blocked 253 | return pkg_checked[pkgname] 254 | 255 | # Check for local dependencs updates 256 | local_deps = db.filter_dependencies([pkg_local['depends'], pkg_local['makedepends']], local=True) 257 | for dep in local_deps: 258 | # Rebuild trigger 259 | with suppress(KeyError): 260 | dep = db.get_provider(dep)['name'] 261 | dep_res = check_pkg(dep, arch, args.forceall) 262 | if dep_res == Dependency.rebuilt: 263 | log.warning("Local Dependency '%s' of AUR-PKG '%s' rebuilt --> rebuilding" % (dep, pkgname)) 264 | do_build = True 265 | elif dep_res == Dependency.blocked: 266 | problem_depends.append(dep) 267 | build_blocked = True 268 | # New dependency build available 269 | # any pkg's are not rebuilt, as this would lead to building them still for each arch 270 | elif not do_build and not pkg_local['arch'][0] == 'any': 271 | # pkg_build IS set here - otherwise do_build would be true 272 | try: 273 | dep_build_release_link = by_name(pkg_build['linkdepends'], dep)['release'] 274 | dep_build_release_available 
= db.get_result(dep, build_arch=arch, rtype='build')['release'] 275 | if dep_build_release_link < dep_build_release_available: 276 | log.warning("Local Dependency '%s' of AUR-PKG '%s' updated --> rebuilding" % (dep, pkgname)) 277 | do_build = True 278 | except StopIteration: 279 | log.warning("Local Dependency '%s' of AUR-PKG '%s' updated --> rebuilding" % (dep, pkgname)) 280 | do_build = True 281 | 282 | if build_blocked: 283 | db.set_result(pkgname, arch, 'problem', ptype='blocked_depends', depends=problem_depends) 284 | if args.strict: 285 | raise FatalError("Dependencies for pkg '%s' blocked: %s" % (pkgname, ', '.join(problem_depends))) 286 | pkg_checked[pkgname] = Dependency.blocked 287 | return pkg_checked[pkgname] 288 | 289 | db.delete_result(pkgname, arch, 'problem') 290 | if do_build: 291 | db.set_status(pkgs_scheduled, pkgs_done, building=pkgname, arch=arch) 292 | if make_pkg(pkgname, arch): 293 | pkg_checked[pkgname] = Dependency.rebuilt 294 | else: 295 | pkg_checked[pkgname] = Dependency.blocked 296 | # we already have a db.set_fail, so no set_block 297 | else: 298 | pkg_checked[pkgname] = Dependency.ok 299 | return pkg_checked[pkgname] 300 | 301 | def sync_pkg(pkgname, force_sync=False): 302 | log.debug("Inquiring local pkg: %s" % pkgname) 303 | 304 | try: 305 | pkg_aur = aur.get_pkg(pkgname) 306 | except Exception: 307 | db.set_result(pkgname, arch, 'problem', ptype='not_in_aur') 308 | if args.strict: 309 | raise FatalError('PKG not in AUR: %s' % pkgname) 310 | pkg_checked[pkgname] = Dependency.blocked 311 | 312 | # Check PKG in local db & up to date 313 | try: 314 | try: 315 | pkg_local = db.get_pkg(pkgname) 316 | src_pkg = os.path.join('/var/cache/aurbs/srcpkgs', '%s.tar.gz' % pkgname) 317 | if not os.path.exists(src_pkg): 318 | log.warning("AUR-PKG '%s' src-pkg not found --> syncing" % pkgname) 319 | raise PKGSyncNeeded() 320 | except KeyError: 321 | log.warning("AUR-PKG '%s' not found in local db --> syncing" % pkgname) 322 | raise PKGSyncNeeded() 
323 | if version_newer(pkg_local['version'], pkg_aur['version']): 324 | log.warning("AUR-PKG '%s' outdated in local db --> resyncing" % pkgname) 325 | raise PKGSyncNeeded() 326 | if force_sync: 327 | log.warning("AUR-PKG '%s' forced resync via CLI --> resyncing" % pkgname) 328 | raise PKGSyncNeeded() 329 | else: 330 | # Refresh unimportant pkg details (votes, new maintainer, etc) 331 | db.update_pkg(pkgname, pkg_aur) 332 | except PKGSyncNeeded: 333 | db.sync_pkg(pkgname) 334 | pkg_local = db.get_pkg(pkgname) 335 | 336 | 337 | # Initialize config 338 | log.debug("Reading config from '%s'" % args.config) 339 | AurBSConfig(args.config) 340 | 341 | 342 | webserver = WebServer(repodir(""), 8024) 343 | 344 | batch_mode = not (args.arch or args.pkg) 345 | 346 | try: 347 | # Create database connection 348 | db = Database() 349 | 350 | if batch_mode: 351 | # Delete all db entries (pkgs, problems and builds), that are not in AurBSConfig().aurpkgs 352 | db.cleanup_orphaned() 353 | 354 | # Delete all srcpkgs, that are not in AurBSConfig().aurpkgs 355 | for srcpkg in os.listdir(srcpkgdir): 356 | if srcpkg.replace('.tar.gz', '') not in AurBSConfig().aurpkgs: 357 | log.info("Cleanup orphaned srcpkg: %s" % srcpkg) 358 | os.remove(os.path.join(srcpkgdir, srcpkg)) 359 | 360 | # Delete all build dirs, that don't belong to pkgs in AurBSConfig().aurpkgs 361 | for arch in AurBSConfig().architectures: 362 | for pkg_builddir in os.listdir(build_dir(arch)): 363 | if pkg_builddir not in AurBSConfig().aurpkgs: 364 | log.info("Cleanup orphaned build dir: %s/%s" % (arch, pkg_builddir)) 365 | shutil.rmtree(os.path.join(build_dir(arch), pkg_builddir)) 366 | 367 | # Delete all files and (repo) db entries, that have no matching build 368 | repo_archs = AurBSConfig().architectures.copy() 369 | repo_archs.append("any") 370 | for repo_arch in repo_archs: 371 | for item in find_pkg_files(directory=repodir(repo_arch)): 372 | item_name = item.rsplit(".", 3)[0] 373 | [ipkgname, ipkgver, ipkgrel, iarch] = 
item_name.rsplit("-", 3) 374 | try: 375 | pkgbase = db.get_pkgbase(ipkgname) 376 | ibuild = db.get_result(pkgbase['name'], arch=iarch, rtype='build') 377 | except KeyError: 378 | os.remove(os.path.join(repodir(repo_arch), item)) 379 | if repo_arch != "any": 380 | with suppress(OSError): 381 | log.info("Cleanup orphaned pkg-file: %s" % ipkgname) 382 | subproc.ccall(['repo-remove', 'aurstaging.db.tar.gz', ipkgname], cwd=repodir(repo_arch)) 383 | 384 | for arch in AurBSConfig().architectures if not args.arch else [args.arch]: 385 | log.info("Building for architecture %s" % arch) 386 | 387 | # Create chroot, if missing 388 | if not os.path.exists(chroot_root(arch)): 389 | log.warning("Creating %s build root" % arch) 390 | subproc.ccall(['mkarchroot', 391 | '-C', '/usr/share/aurbs/cfg/pacman.conf.%s' % arch, 392 | '-M', '/usr/share/aurbs/cfg/makepkg.conf.%s' % arch, 393 | chroot_root(arch), 394 | 'base-devel', 'ccache', 'git'], 395 | interrupt=signal.SIGINT, 396 | int_active_child=True, 397 | stdout=sys.stdout, 398 | stderr=sys.stderr 399 | ) 400 | 401 | # update chroot 402 | log.info("Updating %s build root" % arch) 403 | subproc.ccall( 404 | ["arch-nspawn", chroot_root(arch), "pacman", "-Syu", "--noconfirm", "--noprogressbar"], 405 | interrupt=signal.SIGINT, 406 | int_active_child=True, 407 | ) 408 | 409 | pkg_checked = {} 410 | for pkg in AurBSConfig().aurpkgs if not args.pkg else [args.pkg]: 411 | sync_pkg(pkg, args.forcesync) 412 | for pkg in AurBSConfig().aurpkgs if not args.pkg else [args.pkg]: 413 | res = check_pkg(pkg, arch, args.force or args.forceall) 414 | pkgs_done = [e for e in pkg_checked.keys()] if batch_mode else [pkg] 415 | db.set_status(scheduled=[], done=pkgs_done, arch=arch) 416 | 417 | if batch_mode: 418 | log.warning("Publishing repos") 419 | subproc.ccall([ 420 | "rsync", "-P", "-rlptgo", "--delete", "--include", "*/", "--include", "*.pkg.tar.*", "--exclude", "*", 421 | repodir(""), repodir_public("")], 422 | interrupt=signal.SIGINT 423 | ) 424 | 
for repo_arch in AurBSConfig().architectures: 425 | log.info("Creating %s repo db" % (repo_arch)) 426 | repodb_name = "%s.db" % AurBSConfig().public_repo['name'] 427 | repodb_file = os.path.join(repodir_public(repo_arch), '%s.tar.gz' % repodb_name) 428 | repodb_link = os.path.join(repodir_public(repo_arch), repodb_name) 429 | filesdb_name = "%s.files" % AurBSConfig().public_repo['name'] 430 | filesdb_file = os.path.join(repodir_public(repo_arch), '%s.tar.gz' % filesdb_name) 431 | filesdb_link = os.path.join(repodir_public(repo_arch), filesdb_name) 432 | for filename in os.listdir(repodir_public(repo_arch)): 433 | for key in [".db", ".db.old", ".db.tar.gz", ".files", "files.tar.gz", "files.tar.gz.old"]: 434 | if filename.endswith(key): 435 | os.remove(os.path.join(repodir_public(repo_arch), filename)) 436 | shutil.copyfile(os.path.join(repodir(repo_arch), 'aurstaging.db.tar.gz'), repodb_file) 437 | shutil.copyfile(os.path.join(repodir(repo_arch), 'aurstaging.files.tar.gz'), filesdb_file) 438 | os.symlink('%s.tar.gz' % repodb_name, repodb_link) 439 | os.symlink('%s.tar.gz' % filesdb_name, filesdb_link) 440 | if AurBSConfig().public_repo['rsync'] is not None and 'url' in AurBSConfig().public_repo['rsync']: 441 | log.warning("RSyncing to remote location") 442 | subproc.ccall([ 443 | "rsync", "-P", "-rlpt", "--delete", 444 | repodir_public(""), AurBSConfig().public_repo['rsync']['url']], 445 | env={"RSYNC_PASSWORD": AurBSConfig().public_repo['rsync'].get('pass', '')}, 446 | interrupt=signal.SIGINT 447 | ) 448 | except FatalError as e: 449 | log.error("Fatal Error: %s" % e) 450 | sys.exit(1) 451 | finally: 452 | webserver.stop() 453 | -------------------------------------------------------------------------------- /aurbs/ui/static/bootstrap/js/bootstrap.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap.js by @fat & @mdo 3 | * Copyright 2013 Twitter, Inc. 
4 | * http://www.apache.org/licenses/LICENSE-2.0.txt 5 | */ 6 | !function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()};var r=e.fn.alert;e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e.fn.alert.noConflict=function(){return e.fn.alert=r,this},e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")};var n=e.fn.button;e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new 
t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e.fn.button.noConflict=function(){return e.fn.button=n,this},e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=n,this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},getActiveIndex:function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},to:function(t){var n=this.getActiveIndex(),r=this;if(t>this.$items.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){r.to(t)}):n==t?this.pause().cycle():this.slide(t>n?"next":"prev",e(this.$items[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle(!0)),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0],direction:o});if(i.hasClass("active"))return;this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var t=e(a.$indicators.children()[a.getActiveIndex()]);t&&t.addClass("active")}));if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}};var n=e.fn.carousel;e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.pause().cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e.fn.carousel.noConflict=function(){return e.fn.carousel=n,this},e(document).on("click.carousel.data-api","[data-slide], [data-slide-to]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data()),o;i.carousel(s),(o=n.attr("data-slide-to"))&&i.data("carousel").pause().to(o).cycle(),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var 
t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning||this.$element.hasClass("in"))return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning||!this.$element.hasClass("in"))return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}};var n=e.fn.collapse;e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=e.extend({},e.fn.collapse.defaults,r.data(),typeof n=="object"&&n);i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e.fn.collapse.noConflict=function(){return e.fn.collapse=n,this},e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var 
n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use strict";function r(){e(".dropdown-backdrop").remove(),e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=n&&e(n);if(!r||!r.length)r=t.parent();return r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||("ontouchstart"in document.documentElement&&e('