├── README.rst ├── archrepo2 ├── __init__.py ├── archreposrv.py ├── dbutil.py ├── lib │ ├── __init__.py │ ├── archpkg.py │ └── nicelogger.py └── repomon.py ├── misc ├── archrepo2.ini.example └── archrepo2.service ├── scripts ├── on_update_notification ├── touch_pkg_with_newer_sig ├── upgrade_from_0.2_to_0.3.py └── upgrade_from_0.3_to_0.4.py ├── setup.py └── test ├── instructions.act └── repomon_test.py /README.rst: -------------------------------------------------------------------------------- 1 | USAGE 2 | ===== 3 | 4 | Install:: 5 | 6 | python3 setup.py install 7 | 8 | Edit a copy of ``archrepo.ini.example`` and then run 9 | ``archreposrv ``. 10 | 11 | DEPENDENCIES 12 | ============ 13 | 14 | - Python, >= 3.3, with sqlite support 15 | - setuptools 16 | - tornado, > 3.1 17 | - pyinotify, tested with 0.9.4 18 | - pyalpm, tested with 0.10.6 19 | 20 | NOTE 21 | ==== 22 | 23 | - relative symlinks may be broken when moving to the right architecture 24 | directory 25 | 26 | TODO 27 | ==== 28 | 29 | - [high] adding and then removing it before adding complete will result 30 | in not-in-database removing 31 | - [middle] specify what architectures we have and don't require others 32 | - [low] use one common command queue (now one each repo) 33 | - [low] verify packages 34 | 35 | -------------------------------------------------------------------------------- /archrepo2/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.6dev' 2 | -------------------------------------------------------------------------------- /archrepo2/archreposrv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | import configparser 5 | import logging 6 | 7 | from tornado.ioloop import IOLoop 8 | 9 | from .lib.nicelogger import enable_pretty_logging 10 | enable_pretty_logging(logging.DEBUG) 11 | 12 | from .repomon import repomon 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | def check_and_get_repos(config): 17 | repos = config['multi'].get('repos', 'repository') 18 | for field in ('name', 'path'): 19 | if config['multi'].get(field, None) is not None: 20 | raise ValueError('config %r cannot have default value.' % field) 21 | 22 | repos = {repo.strip() for repo in repos.split(',')} 23 | for field in ('name', 'path'): 24 | vals = [config[repo].get(field) for repo in repos] 25 | if len(vals) != len(set(vals)): 26 | raise ValueError('duplicate %s in different repositories.' % field) 27 | 28 | return repos 29 | 30 | def main(): 31 | conffile = sys.argv[1] 32 | config = configparser.ConfigParser(default_section='multi') 33 | config.read(conffile) 34 | repos = check_and_get_repos(config) 35 | 36 | notifiers = [] 37 | for repo in repos: 38 | notifiers.extend(repomon(config[repo])) 39 | 40 | ioloop = IOLoop.current() 41 | logger.info('starting archreposrv.') 42 | try: 43 | ioloop.start() 44 | except KeyboardInterrupt: 45 | for notifier in notifiers: 46 | notifier.stop() 47 | ioloop.close() 48 | print() 49 | 50 | if __name__ == '__main__': 51 | if sys.version_info[:2] < (3, 3): 52 | raise OSError('Python 3.3+ required.') 53 | main() 54 | -------------------------------------------------------------------------------- /archrepo2/dbutil.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | 3 | def getver(db): 4 | try: 5 | ver = tuple(db.execute('select ver from version_info limit 1'))[0][0] 6 | except sqlite3.OperationalError: 7 | ver = '0.1' # This version has no version info 8 | return ver 9 | 10 | def setver(db, ver): 11 | db.execute('''create table if not exists version_info 12 | (ver text)''') 13 | c = tuple(db.execute('select count(*) from version_info'))[0][0] 14 | if c == 1: 15 | db.execute('update version_info set ver=?', (ver,)) 16 | else: 17 | db.execute('insert into version_info (ver) values (?)', (ver,)) 18 | db.commit() 19 | -------------------------------------------------------------------------------- /archrepo2/lib/__init__.py: -------------------------------------------------------------------------------- 1 | ''' 2 | moduels in this directory are taken from `winterpy `_. 3 | 4 | last sync is at 2013-08-23. 5 | ''' 6 | -------------------------------------------------------------------------------- /archrepo2/lib/archpkg.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from collections import namedtuple 5 | import subprocess 6 | import re 7 | from typing import List, Dict 8 | 9 | import pyalpm 10 | 11 | class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')): 12 | def __lt__(self, other) -> bool: 13 | if self.name != other.name or self.arch != other.arch: 14 | return NotImplemented 15 | if self.version != other.version: 16 | return pyalpm.vercmp(self.version, other.version) < 0 17 | return float(self.release) < float(other.release) 18 | 19 | def __gt__(self, other) -> bool: 20 | # No, try the other side please. 21 | return NotImplemented 22 | 23 | @property 24 | def fullversion(self) -> str: 25 | return '%s-%s' % (self.version, self.release) 26 | 27 | @classmethod 28 | def parseFilename(cls, filename: str) -> 'PkgNameInfo': 29 | return cls(*trimext(filename, 3).rsplit('-', 3)) 30 | 31 | def trimext(name: str, num: int = 1) -> str: 32 | for i in range(num): 33 | name = os.path.splitext(name)[0] 34 | return name 35 | 36 | def get_pkgname_with_bash(PKGBUILD: str) -> List[str]: 37 | script = '''\ 38 | . '%s' 39 | echo ${pkgname[*]}''' % PKGBUILD 40 | # Python 3.4 has 'input' arg for check_output 41 | p = subprocess.Popen( 42 | ['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home', 43 | '--tmpfs', '/run', '--die-with-parent', 44 | '--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'], 45 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, 46 | ) 47 | output = p.communicate(script.encode())[0].decode() 48 | ret = p.wait() 49 | if ret != 0: 50 | raise subprocess.CalledProcessError( 51 | ret, ['bash'], output) 52 | return output.split() 53 | 54 | pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$') 55 | 56 | def _strip_ver(s: str) -> str: 57 | return re.sub(r'[<>=].*', '', s) 58 | 59 | def get_package_info(name: str, local: bool = False) -> Dict[str, str]: 60 | old_lang = os.environ['LANG'] 61 | os.environ['LANG'] = 'C' 62 | args = '-Qi' if local else '-Si' 63 | try: 64 | outb = subprocess.check_output(["pacman", args, name]) 65 | out = outb.decode('latin1') 66 | finally: 67 | os.environ['LANG'] = old_lang 68 | 69 | ret = {} 70 | for l in out.splitlines(): 71 | if not l: 72 | continue 73 | if l[0] not in ' \t': 74 | key, value = l.split(':', 1) 75 | key = key.strip() 76 | value = value.strip() 77 | ret[key] = value 78 | else: 79 | ret[key] += ' ' + l.strip() 80 | return ret 81 | 82 | -------------------------------------------------------------------------------- /archrepo2/lib/nicelogger.py: -------------------------------------------------------------------------------- 1 | ''' 2 | A Tornado-inspired logging formatter, with displayed time with millisecond accuracy 3 | 4 | FYI: pyftpdlib also has a Tornado-style logger. 5 | ''' 6 | 7 | from __future__ import annotations 8 | 9 | import sys 10 | import time 11 | import logging 12 | 13 | class TornadoLogFormatter(logging.Formatter): 14 | def __init__(self, color, *args, **kwargs): 15 | super().__init__(*args, **kwargs) 16 | self._color = color 17 | if color: 18 | import curses 19 | curses.setupterm() 20 | if sys.hexversion < 0x30203f0: 21 | fg_color = str(curses.tigetstr("setaf") or 22 | curses.tigetstr("setf") or "", "ascii") 23 | else: 24 | fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" 25 | self._colors = { 26 | logging.DEBUG: str(curses.tparm(fg_color, 4), # Blue 27 | "ascii"), 28 | logging.INFO: str(curses.tparm(fg_color, 2), # Green 29 | "ascii"), 30 | logging.WARNING: str(curses.tparm(fg_color, 3), # Yellow 31 | "ascii"), 32 | logging.ERROR: str(curses.tparm(fg_color, 1), # Red 33 | "ascii"), 34 | logging.CRITICAL: str(curses.tparm(fg_color, 9), # Bright Red 35 | "ascii"), 36 | } 37 | self._normal = str(curses.tigetstr("sgr0"), "ascii") 38 | 39 | def format(self, record): 40 | try: 41 | record.message = record.getMessage() 42 | except Exception as e: 43 | record.message = "Bad message (%r): %r" % (e, record.__dict__) 44 | record.asctime = time.strftime( 45 | "%m-%d %H:%M:%S", self.converter(record.created)) 46 | prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \ 47 | record.__dict__ 48 | if self._color: 49 | prefix = (self._colors.get(record.levelno, self._normal) + 50 | prefix + self._normal) 51 | formatted = prefix + " " + record.message 52 | 53 | formatted += ''.join( 54 | ' %s=%s' % (k, v) for k, v in record.__dict__.items() 55 | if k not in { 56 | 'levelname', 'asctime', 'module', 'lineno', 'args', 'message', 57 | 'filename', 'exc_info', 'exc_text', 'created', 'funcName', 58 | 'processName', 'process', 'msecs', 'relativeCreated', 'thread', 59 | 'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info', 60 | 'taskName', 61 | }) 62 | 63 | if record.exc_info: 64 | if not record.exc_text: 65 | record.exc_text = self.formatException(record.exc_info) 66 | if record.exc_text: 67 | formatted = formatted.rstrip() + "\n" + record.exc_text 68 | return formatted.replace("\n", "\n ") 69 | 70 | def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None): 71 | ''' 72 | handler: specify a handler instead of default StreamHandler 73 | color: boolean, force color to be on / off. Default to be on only when 74 | ``handler`` isn't specified and the term supports color 75 | ''' 76 | logger = logging.getLogger() 77 | if handler is None: 78 | h = logging.StreamHandler() 79 | else: 80 | h = handler 81 | if color is None: 82 | color = False 83 | if handler is None and sys.stderr.isatty(): 84 | try: 85 | import curses 86 | curses.setupterm() 87 | if curses.tigetnum("colors") > 0: 88 | color = True 89 | except: 90 | import traceback 91 | traceback.print_exc() 92 | formatter = TornadoLogFormatter(color=color) 93 | h.setLevel(level) 94 | h.setFormatter(formatter) 95 | logger.setLevel(level) 96 | logger.addHandler(h) 97 | -------------------------------------------------------------------------------- /archrepo2/repomon.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import re 5 | import pwd 6 | import stat 7 | from functools import partial 8 | from itertools import filterfalse 9 | import queue 10 | import logging 11 | import sqlite3 12 | import socket 13 | import time 14 | import hashlib 15 | from os.path import relpath 16 | 17 | import pyinotify 18 | Event = pyinotify.Event 19 | from tornado.ioloop import IOLoop 20 | import tornado.process 21 | 22 | from .lib import archpkg 23 | from . import dbutil 24 | 25 | logger = logging.getLogger(__name__) 26 | 27 | def same_existent_file(a, b): 28 | try: 29 | return os.path.samefile(a, b) 30 | except OSError: 31 | return False 32 | 33 | class ActionInfo(archpkg.PkgNameInfo): 34 | def __new__(cls, path, action, four=None, five=None, pkgpath=None): 35 | if four is not None: 36 | return super().__new__(cls, path, action, four, five) 37 | file = os.path.split(pkgpath or path)[1] 38 | self = cls.parseFilename(file) 39 | self.action = action 40 | self.path = path 41 | return self 42 | 43 | def __repr__(self): 44 | return '' % (self.action, self.path) 45 | 46 | class RepoMan: 47 | _timeout = None 48 | _cmd_queue = queue.Queue() 49 | _cmd_running = False 50 | 51 | def __init__(self, config, base, siteman): 52 | self.action = [] 53 | self._ioloop = IOLoop.current() 54 | self._base = base 55 | self._siteman = siteman 56 | 57 | self._repo_dir = config.get('path') 58 | self.name = config.get('name') 59 | self._db_file = os.path.join(base, self.name + '.db.tar.gz') 60 | self._files_name = os.path.join(base, self._db_file.replace('.db.tar.gz', '.files.tar.gz')) 61 | self._command_add = config.get('command-add', 'repo-add') 62 | self._command_remove = config.get('command-remove', 'repo-remove') 63 | self._wait_time = config.getint('wait-time', 10) 64 | self._without_db = config.getboolean('without-db', False) 65 | self._auto_rename = config.getboolean('auto-rename', True) 66 | self._symlink_any = config.getboolean('symlink-any', True) 67 | 68 | def queue_command(self, cmd, callbacks=None): 69 | self._cmd_queue.put((cmd, callbacks)) 70 | if not self._cmd_running: 71 | self.run_command() 72 | 73 | def run_command(self): 74 | if not self._cmd_running: 75 | self._siteman.inc_running() 76 | self.__class__._cmd_running = True 77 | try: 78 | cmd, callbacks = self._cmd_queue.get_nowait() 79 | except queue.Empty: 80 | self.__class__._cmd_running = False 81 | self._siteman.dec_running() 82 | return 83 | 84 | logger.info('Running cmd: %r', cmd) 85 | # no longer have to specify io_loop in Tornado > 3.1. Let's drop them for 86 | # Tornado >= 5 87 | try: 88 | p = tornado.process.Subprocess(cmd) 89 | except OSError: 90 | logger.error('failed to run command.', exc_info=True) 91 | self.run_command() 92 | else: 93 | p.set_exit_callback(partial(self.command_done, callbacks)) 94 | 95 | def command_done(self, callbacks, status): 96 | if status == 0: 97 | if callbacks: 98 | for cb in callbacks: 99 | cb() 100 | logger.info('previous command done.') 101 | else: 102 | logger.warn('previous command failed with status code %d.', status) 103 | self.run_command() 104 | 105 | def _do_cmd(self, cmd, items, callbacks): 106 | cmd1 = [cmd, self._db_file] 107 | cmd1.extend(items) 108 | self.queue_command(cmd1, callbacks) 109 | 110 | def _do_add(self, toadd): 111 | if toadd: 112 | files, callbacks = zip(*toadd) 113 | if self._without_db: 114 | self._do_callbacks(callbacks) 115 | else: 116 | self._do_cmd(self._command_add, files, callbacks) 117 | 118 | def _do_remove(self, toremove): 119 | if toremove: 120 | files, callbacks = zip(*toremove) 121 | if self._without_db: 122 | self._do_callbacks(callbacks) 123 | else: 124 | self._do_cmd(self._command_remove, files, callbacks) 125 | 126 | def _do_callbacks(self, callbacks): 127 | for cb in callbacks: 128 | cb() 129 | 130 | def add_action(self, action): 131 | logger.info('Adding action %r to db %r', action, self._db_file) 132 | self._action_pending(action) 133 | 134 | def _action_pending(self, act): 135 | self.action.append(act) 136 | if self._timeout: 137 | self._ioloop.remove_timeout(self._timeout) 138 | self._timeout = self._ioloop.add_timeout( 139 | self._ioloop.time() + self._wait_time, 140 | self.run, 141 | ) 142 | 143 | def run(self): 144 | self._timeout = None 145 | actions = self.action 146 | self.action = [] 147 | actiondict = {} 148 | for act in actions: 149 | if act.name not in actiondict: 150 | actiondict[act.name] = act 151 | else: 152 | oldact = actiondict[act.name] 153 | if oldact != act: 154 | # different packages, do the latter, but record the former 155 | try: 156 | actiondict[act.name].callback(state=0) 157 | except: 158 | logger.exception('failed to run action %r.', actiondict[act.name]) 159 | # same package, do the latter, and discard the forter 160 | actiondict[act.name] = act 161 | toadd = [(x.path, x.callback) for x in actiondict.values() if x.action == 'add'] 162 | toremove = [(x.name, x.callback) for x in actiondict.values() if x.action == 'remove'] 163 | self._do_add(toadd) 164 | self._do_remove(toremove) 165 | 166 | class EventHandler(pyinotify.ProcessEvent): 167 | _n_running = 0 168 | 169 | def my_init( 170 | self, filter_pkg, supported_archs, config, wm, 171 | ): 172 | 173 | notification_type = config.get( 174 | 'notification-type', 'null') 175 | if notification_type != 'null': 176 | self._notification_addrs = config.get( 177 | 'notification-addresses') 178 | self._notification_secret = config.get( 179 | 'notification-secret') 180 | self.send_notification = getattr( 181 | self, 182 | 'send_notification_' + 183 | notification_type.replace('-', '_'), 184 | ) 185 | 186 | self.filter_pkg = filter_pkg 187 | self.moved_away = {} 188 | self.created = {} 189 | self.repomans = {} 190 | # TODO: use an expiring dict 191 | self.our_links = set() 192 | self._ioloop = IOLoop.current() 193 | 194 | base = config.get('path') 195 | self._lastupdate_file = os.path.join(base, 'lastupdate') 196 | 197 | dbname = config.get('info-db', os.path.join(base, 'pkginfo.db')) 198 | new_db = not os.path.exists(dbname) 199 | self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit 200 | self._db_dir = os.path.dirname(dbname) 201 | if new_db: 202 | dbutil.setver(self._db, '0.4') 203 | else: 204 | assert dbutil.getver(self._db) == '0.4', 'wrong database version, please upgrade (see scripts directory)' 205 | self._db.execute('''create table if not exists pkginfo 206 | (filename text unique, 207 | pkgrepo text, 208 | pkgname text, 209 | pkgarch text, 210 | pkgver text, 211 | forarch text, 212 | owner text, 213 | mtime int, 214 | state int, 215 | info blob)''') 216 | self._db.execute('''create table if not exists sigfiles 217 | (filename text unique, 218 | pkgrepo text)''') 219 | 220 | self._supported_archs = supported_archs 221 | dirs = [os.path.join(base, x) for x in self._supported_archs] 222 | self.files = files = set() 223 | for d in dirs: 224 | os.makedirs(d, exist_ok=True) 225 | for f in os.listdir(d): 226 | p = os.path.join(d, f) 227 | if os.path.exists(p): # filter broken symlinks 228 | files.add(p) 229 | wm.add_watch(d, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE | 230 | pyinotify.IN_CREATE | pyinotify.IN_MOVED_FROM | 231 | pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN) 232 | self.repomans[d] = RepoMan(config, d, self) 233 | self.name = self.repomans[d].name 234 | self._auto_rename = self.repomans[d]._auto_rename 235 | self._symlink_any = self.repomans[d]._symlink_any 236 | 237 | self._initial_update(files) 238 | 239 | def _initial_update(self, files): 240 | oldfiles = {f[0] for f in self._db.execute('select filename from pkginfo where pkgrepo = ?', (self.name,))} 241 | oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles where pkgrepo = ?', (self.name,))) 242 | oldfiles = {os.path.join(self._db_dir, f) for f in oldfiles} 243 | 244 | for f in sorted(filterfalse(self.filter_pkg, files - oldfiles), 245 | key=pkgsortkey): 246 | self.dispatch(f, 'add') 247 | 248 | for f in sorted(filterfalse(self.filter_pkg, oldfiles - files), 249 | key=pkgsortkey): 250 | self.dispatch(f, 'remove') 251 | 252 | def process_IN_CLOSE_WRITE(self, event): 253 | logger.debug('Writing done: %s', event.pathname) 254 | self.dispatch(event.pathname, 'add') 255 | self.files.add(event.pathname) 256 | 257 | def process_IN_DELETE(self, event): 258 | logger.debug('Removing: %s', event.pathname) 259 | self.dispatch(event.pathname, 'remove') 260 | try: 261 | self.files.remove(event.pathname) 262 | except KeyError: 263 | # symlinks haven't been added 264 | pass 265 | 266 | def process_IN_CREATE(self, event): 267 | file = event.pathname 268 | if os.path.islink(file): 269 | if file in self.our_links: 270 | self.our_links.remove(file) 271 | else: 272 | logger.debug('Symlinked: %s', file) 273 | self.dispatch(file, 'add') 274 | self.files.add(file) 275 | else: 276 | logger.debug('Created: %s', file) 277 | self.created[file] = self._ioloop.add_timeout( 278 | self._ioloop.time() + 0.1, 279 | partial(self.linked, file), 280 | ) 281 | 282 | def process_IN_OPEN(self, event): 283 | file = event.pathname 284 | try: 285 | timeout = self.created.pop(file) 286 | except KeyError: 287 | return 288 | 289 | self._ioloop.remove_timeout(timeout) 290 | 291 | def linked(self, file): 292 | logger.debug('Linked: %s', file) 293 | del self.created[file] 294 | self.dispatch(file, 'add') 295 | self.files.add(file) 296 | 297 | def movedOut(self, event): 298 | logger.debug('Moved away: %s', event.pathname) 299 | del self.moved_away[event.cookie] 300 | self.dispatch(event.pathname, 'remove') 301 | 302 | def process_IN_MOVED_FROM(self, event): 303 | self.moved_away[event.cookie] = self._ioloop.add_timeout( 304 | self._ioloop.time() + 0.1, 305 | partial(self.movedOut, event), 306 | ) 307 | self.files.remove(event.pathname) 308 | 309 | def process_IN_MOVED_TO(self, event): 310 | if event.pathname in self.files: 311 | logger.warn('Overwritten: %s', event.pathname) 312 | self.files.add(event.pathname) 313 | 314 | if event.cookie in self.moved_away: 315 | self._ioloop.remove_timeout(self.moved_away.pop(event.cookie)) 316 | else: 317 | logger.debug('Moved here: %s', event.pathname) 318 | self.dispatch(event.pathname, 'add') 319 | 320 | def dispatch(self, path, action): 321 | if path.endswith('.sig'): 322 | act = ActionInfo(path, action, pkgpath=path[:-4]) 323 | callback = self._signature_changed 324 | else: 325 | act = ActionInfo(path, action) 326 | callback = self._real_dispatch 327 | 328 | d, file = os.path.split(path) 329 | base, arch = os.path.split(d) 330 | 331 | # rename if a packager has added to a wrong directory 332 | if self._auto_rename and action == 'add' and act.arch != arch: 333 | newd = os.path.join(base, act.arch) 334 | newpath = os.path.join(newd, file) 335 | if not same_existent_file(path, newpath): 336 | os.rename(path, newpath) 337 | 338 | act.path = newpath 339 | path = newpath 340 | arch = act.arch 341 | d = newd 342 | 343 | if self._symlink_any and act.arch == 'any': 344 | for newarch in self._supported_archs: 345 | if newarch == arch: 346 | # this file itself 347 | continue 348 | newd = os.path.join(base, newarch) 349 | newpath = os.path.join(newd, file) 350 | if action == 'add': 351 | oldpath = os.path.join('..', arch, file) 352 | if not same_existent_file(oldpath, newpath): 353 | os.makedirs(newd, exist_ok=True) 354 | try: 355 | self.our_links.add(newpath) 356 | os.symlink(oldpath, newpath) 357 | except FileExistsError: 358 | pass 359 | callback(newd, ActionInfo(newpath, action)) 360 | else: 361 | try: 362 | os.unlink(newpath) 363 | # this will be detected and handled later 364 | except FileNotFoundError: 365 | # someone deleted the file for us 366 | pass 367 | 368 | callback(d, act) 369 | 370 | def _real_dispatch(self, d, act): 371 | if act.action == 'add': 372 | arch = os.path.split(d)[1] 373 | def callback(stat, state=1): 374 | self._db.execute( 375 | 'update pkginfo set state = 0 where pkgname = ? and forarch = ? and pkgrepo = ?', 376 | (act.name, arch, self.name) 377 | ) 378 | mtime = int(stat.st_mtime) 379 | try: 380 | owner = pwd.getpwuid(stat.st_uid).pw_name 381 | except KeyError: 382 | owner = 'uid_%d' % stat.st_uid 383 | 384 | info = None 385 | self._db.execute( 386 | '''insert or replace into pkginfo 387 | (filename, pkgrepo, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values 388 | (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''', 389 | (relpath(act.path, start=self._db_dir), 390 | self.name, act.name, act.arch, act.fullversion, arch, state, owner, mtime, info)) 391 | logger.info('Action %r done.', act) 392 | 393 | # stat path here, so that it is more unlikely to have disappeared since 394 | callback = partial(callback, os.stat(act.path)) 395 | else: 396 | rpath = relpath(act.path, start=self._db_dir) 397 | res = self._db.execute( 398 | 'select state from pkginfo where filename = ? and state = 1 and pkgrepo = ? limit 1', 399 | (rpath, self.name) 400 | ) 401 | if tuple(res) == (): 402 | # the file isn't in repo database, just delete from our info database 403 | logger.debug('deleting entry for not-in-database package: %s', rpath) 404 | self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name)) 405 | return 406 | def callback(state=any): 407 | '''``state`` is not used''' 408 | self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name)) 409 | 410 | act.callback = callback 411 | self.repomans[d].add_action(act) 412 | 413 | def _signature_changed(self, d, action): 414 | path = action.path 415 | action = action.action 416 | logger.info('%s signature %s.', action, path) 417 | 418 | # Touch the pacakge file so that we'll repo-add it again to include the 419 | # sig change later. 420 | pkg = path[:-4] 421 | try: 422 | st = os.lstat(pkg) 423 | if stat.S_ISREG(st.st_mode): 424 | logger.info('touching %s.', pkg) 425 | os.close(os.open(pkg, os.O_WRONLY)) 426 | os.utime(pkg) 427 | except FileNotFoundError: 428 | pass 429 | 430 | rpath = relpath(path, start=self._db_dir) 431 | if action == 'add': 432 | self._db.execute('''insert or replace into sigfiles 433 | (filename, pkgrepo) values (?, ?)''', 434 | (rpath, self.name)) 435 | else: 436 | self._db.execute('''delete from sigfiles where filename = ? and pkgrepo = ?''', 437 | (rpath, self.name)) 438 | 439 | def dec_running(self): 440 | self._n_running -= 1 441 | if self._n_running == 0: 442 | self.send_notification() 443 | self.update_lastupdate() 444 | 445 | def inc_running(self): 446 | self._n_running += 1 447 | 448 | def send_notification_simple_udp(self): 449 | msg = self._new_notification_msg() 450 | 451 | socks = {} 452 | for address, port in self._parse_notification_address_inet(): 453 | try: 454 | af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( 455 | address, port, 0, socket.SOCK_DGRAM, 0, 0)[0] 456 | except: 457 | logger.exception('failed to create socket to %r for notification', 458 | (address, port)) 459 | continue 460 | 461 | info = af, socktype, proto 462 | if info not in socks: 463 | sock = socket.socket(*info) 464 | socks[info] = sock 465 | else: 466 | sock = socks[info] 467 | sock.sendto(msg, sockaddr) 468 | logger.info('simple udp notification sent to %s.', (address, port)) 469 | 470 | def _new_notification_msg(self): 471 | s = 'update' 472 | t = str(int(time.time())) 473 | data = s + '|' + t 474 | hashing = data + self._notification_secret 475 | sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest() 476 | msg = data + '|' + sig 477 | logger.info('new notification msg: %s.', msg) 478 | return msg.encode('utf-8') 479 | 480 | def _parse_notification_address_inet(self): 481 | cached = self._notification_addrs 482 | if isinstance(cached, str): 483 | addresses = [] 484 | for addr in cached.split(): 485 | host, port = addr.rsplit(':', 1) 486 | port = int(port) 487 | addresses.append((host, port)) 488 | cached = self._notification_addrs = tuple(addresses) 489 | return cached 490 | 491 | def send_notification_null(self): 492 | logger.info('null notification sent.') 493 | 494 | def update_lastupdate(self): 495 | t = '%d\n' % time.time() 496 | with open(self._lastupdate_file, 'w') as f: 497 | f.write(t) 498 | 499 | def filter_pkg(regex, path): 500 | if isinstance(path, Event): 501 | path = path.pathname 502 | return not regex.search(path) 503 | 504 | def pkgsortkey(path): 505 | pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1]) 506 | return (pkg.name, pkg.arch, pkg) 507 | 508 | def repomon(config): 509 | wm = pyinotify.WatchManager() 510 | 511 | supported_archs = config.get('supported-archs', 'i686 x86_64').split() 512 | if 'any' not in supported_archs: 513 | supported_archs.append('any') 514 | # assume none of the archs has regex meta characters 515 | regex = re.compile(r'(?:^|/)[^.].*-[^-]+-[\d.]+-(?:' + '|'.join(supported_archs) + r')\.pkg\.tar\.(?:xz|zst)(?:\.sig)?$') 516 | 517 | filter_func = partial(filter_pkg, regex) 518 | handler = EventHandler( 519 | filter_func, 520 | filter_pkg = filter_func, 521 | supported_archs = supported_archs, 522 | config = config, 523 | wm = wm, 524 | ) 525 | ioloop = IOLoop.current() 526 | ret = [pyinotify.TornadoAsyncNotifier( 527 | wm, 528 | default_proc_fun=handler, 529 | ioloop = ioloop, 530 | )] 531 | 532 | if config.get('spool-directory'): 533 | wm = pyinotify.WatchManager() 534 | handler = SpoolHandler( 535 | filter_func, 536 | filter_pkg = filter_func, 537 | path = config.get('spool-directory'), 538 | dstpath = os.path.join(config.get('path'), 'any'), 539 | wm = wm, 540 | ) 541 | ret.append(pyinotify.TornadoAsyncNotifier( 542 | wm, default_proc_fun=handler, 543 | ioloop = ioloop, 544 | )) 545 | 546 | return ret 547 | 548 | class SpoolHandler(pyinotify.ProcessEvent): 549 | def my_init(self, filter_pkg, path, dstpath, wm): 550 | self.filter_pkg = filter_pkg 551 | self.dstpath = dstpath 552 | self._ioloop = IOLoop.current() 553 | self.created = {} 554 | 555 | files = set() 556 | for f in os.listdir(path): 557 | p = os.path.join(path, f) 558 | if os.path.exists(p): # filter broken symlinks 559 | files.add(p) 560 | 561 | wm.add_watch(path, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | 562 | pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN) 563 | self._initial_update(files) 564 | 565 | def _initial_update(self, files): 566 | for f in sorted(filterfalse(self.filter_pkg, files), 567 | key=pkgsortkey): 568 | self.dispatch(f) 569 | 570 | def process_IN_CLOSE_WRITE(self, event): 571 | logger.debug('Writing done: %s', event.pathname) 572 | self.dispatch(event.pathname) 573 | 574 | def process_IN_CREATE(self, event): 575 | file = event.pathname 576 | if os.path.islink(file): 577 | logger.debug('Symlinked: %s', file) 578 | self.dispatch(file) 579 | else: 580 | logger.debug('Created: %s', file) 581 | self.created[file] = self._ioloop.add_timeout( 582 | self._ioloop.time() + 0.1, 583 | partial(self.linked, file), 584 | ) 585 | 586 | def process_IN_OPEN(self, event): 587 | file = event.pathname 588 | try: 589 | timeout = self.created.pop(file) 590 | except KeyError: 591 | return 592 | 593 | self._ioloop.remove_timeout(timeout) 594 | 595 | def linked(self, file): 596 | logger.debug('Linked: %s', file) 597 | del self.created[file] 598 | self.dispatch(file) 599 | 600 | def process_IN_MOVED_TO(self, event): 601 | logger.debug('Moved here: %s', event.pathname) 602 | self.dispatch(event.pathname) 603 | 604 | def dispatch(self, path): 605 | filename = os.path.basename(path) 606 | os.rename(path, os.path.join(self.dstpath, filename)) 607 | -------------------------------------------------------------------------------- /misc/archrepo2.ini.example: -------------------------------------------------------------------------------- 1 | [multi] 2 | # Names of repository config sections, if you have multiple repositories. 3 | # The names should be delimited by commas. And this value is default to 4 | # "repository". 5 | # config names are the same as below, and will act as default if not specified 6 | # in that repository config. 7 | repos: repository 8 | 9 | # You can specify a "info-db" here to have them all in the same repo 10 | #info-db: /home/lilydjwg/tmpfs/test/pkginfo.db 11 | 12 | [repository] 13 | # Name of the repository. In below example the Pacman repository db file name 14 | # will be archlinuxcn.db.tar.gz 15 | name: archlinuxcn 16 | 17 | # Path to the repository - directory should normally contain any, i686 and 18 | # x86_64. The server will monitor files in it with inotify. If you have lots of 19 | # files in this directory, remember to update the configuration of inotify. 20 | path: /home/lilydjwg/tmpfs/test 21 | 22 | # If enabled, packages put into this directory will be moved into the repo. 23 | # This path should be on the same filesystem as the repo path 24 | # Should be used with auto-rename on 25 | spool-directory: /home/lilydjwg/tmpfs/spool 26 | 27 | # A database to store package info. Default to ${path}/pkginfo.db 28 | #info-db: /home/lilydjwg/tmpfs/test/pkginfo.db 29 | 30 | # Specify where to find these commands 31 | #command-add: repo-add 32 | #command-remove: repo-remove 33 | 34 | # By enabling auto-rename, the server will automatically rename the package 35 | # files according to filenames, and move them under the correct architecture 36 | # directory. Default is on. 37 | #auto-rename: on 38 | 39 | # What archs we support? The default is i686 and x86_64. And you can add more 40 | # like arm, armv6h, aarch64. Archs are separated by spaces. 41 | #supported-archs: i686 x86_64 arm 42 | 43 | # By enabling symlink-any, the server will automatically symlink the package 44 | # files of 'any' architecture to supported archs. 45 | # Default is on. 46 | #symlink-any: on 47 | 48 | # Seconds before actually running the command. 10s by default. 49 | #wait-time: 10 50 | wait-time: 3 51 | 52 | # Notification type to use when done. Currently available: simple-udp, null 53 | notification-type: simple-udp 54 | notification-addresses: 127.0.0.1:9900 ::1:9900 55 | notification-secret: JiUHuGY987G76djItfOskOj 56 | 57 | # If for any reason, you don't want actual database creation or update: 58 | #without-db: true 59 | 60 | # vim: se ft=dosini: 61 | -------------------------------------------------------------------------------- /misc/archrepo2.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=archrepo2 service for archlinuxcn repo 3 | 4 | [Service] 5 | Type=simple 6 | ExecStart=/usr/bin/archreposrv /etc/archrepo2.ini 7 | Restart=on-failure 8 | 9 | [Install] 10 | WantedBy=multi-user.target 11 | -------------------------------------------------------------------------------- /scripts/on_update_notification: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import hashlib 5 | import logging 6 | import os 7 | import select 8 | import socket 9 | import subprocess 10 | import sys 11 | import time 12 | 13 | from nicelogger import enable_pretty_logging 14 | 15 | def run_command(command): 16 | logging.info('running command %r', command) 17 | try: 18 | subprocess.check_call(command, shell=True) 19 | except: 20 | logging.exception('failed to run command %r', command) 21 | 22 | def decode_msg(msg, secret): 23 | act, t, sig = msg.split('|') 24 | hashing = act + '|' + t + secret 25 | mysig = hashlib.sha1(hashing.encode('utf-8')).hexdigest() 26 | if mysig != sig: 27 | raise ValueError('signature mismatch') 28 | return act, int(t) 29 | 30 | def main(args, secret): 31 | af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( 32 | args.host, args.port, 0, socket.SOCK_DGRAM, 0, 0)[0] 33 | sock = socket.socket(af, socktype, proto) 34 | sock.bind((args.host, args.port)) 35 | last_run = 0 36 | while True: 37 | r, w, e = select.select([sock], [], [], args.timeout) 38 | if r: 39 | msg, remote = sock.recvfrom(4096) 40 | try: 41 | msg = msg.decode('utf-8') 42 | act, t = decode_msg(msg, secret) 43 | now = time.time() 44 | if not (act == 'update' and abs(t - now) < args.threshold): 45 | logging.warn('skipping unknown or expired msg %r from %r...', 46 | msg, remote) 47 | continue 48 | if abs(now - last_run) < args.repeat_window: 49 | logging.warn('refuse to run too frequently. last run: %r. msg %r from %r...', 50 | time.ctime(last_run), msg, remote) 51 | continue 52 | 53 | last_run = now 54 | run_command(args.command) 55 | except: 56 | logging.exception('error occurred, skipping msg %r from %r...', 57 | msg, remote) 58 | else: 59 | run_command(args.command) 60 | 61 | if __name__ == '__main__': 62 | enable_pretty_logging('INFO') 63 | parser = argparse.ArgumentParser( 64 | description='run command on archrepo2 update notification', 65 | add_help=False, 66 | ) 67 | parser.add_argument('-h', '--host', default='0.0.0.0', 68 | help='host to bind to. default: IPv4 wild address') 69 | parser.add_argument('-p', '--port', type=int, required=True, 70 | help='port to wait on') 71 | parser.add_argument('-t', '--timeout', type=float, 72 | help='timeout for waiting. will run command') 73 | parser.add_argument('-r', '--threshold', type=int, default=60, 74 | help='time threshold for message timestamp. default: 60') 75 | parser.add_argument('-w', '--repeat-window', metavar='SECS', type=int, default=60, 76 | help="don't repeat within this amount of seconds. default: 60") 77 | parser.add_argument('--help', action='help', 78 | help='show this help message and exit') 79 | parser.add_argument('command', 80 | help='command to run') 81 | args = parser.parse_args() 82 | 83 | secret = os.environ.get('REPO_SECRET', '') 84 | if not secret: 85 | logging.fatal('REPO_SECRET environment variable not set') 86 | sys.exit(1) 87 | 88 | logging.info('started') 89 | try: 90 | main(args, secret) 91 | except KeyboardInterrupt: 92 | logging.info('stopped') 93 | -------------------------------------------------------------------------------- /scripts/touch_pkg_with_newer_sig: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | for f; do 4 | p=${f%.sig} 5 | if [[ -f $p && $f -nt $p ]]; then 6 | echo "touching $p." 7 | touch "$p" 8 | fi 9 | done 10 | -------------------------------------------------------------------------------- /scripts/upgrade_from_0.2_to_0.3.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # vim:fileencoding=utf-8 3 | 4 | import os, sys 5 | import sqlite3 6 | import pickle 7 | import logging 8 | 9 | from archrepo2.lib.nicelogger import enable_pretty_logging 10 | enable_pretty_logging(logging.DEBUG) 11 | 12 | from archrepo2.dbutil import * 13 | 14 | def main(dbname, reponame): 15 | db = sqlite3.connect(dbname, isolation_level=None) 16 | if getver(db) != '0.2': 17 | raise Exception('wrong database version') 18 | 19 | input('Please stop the service and then press Enter.') 20 | try: 21 | db.execute('alter table pkginfo add pkgrepo text') 22 | db.execute('update pkginfo set pkgrepo = ?', (reponame,)) 23 | except sqlite3.OperationalError: 24 | # the column is already there 25 | pass 26 | try: 27 | db.execute('alter table sigfiles add pkgrepo text') 28 | db.execute('update sigfiles set pkgrepo = ?', (reponame,)) 29 | except sqlite3.OperationalError: 30 | # the column is already there 31 | pass 32 | 33 | setver(db, '0.3') 34 | db.close() 35 | 36 | input('Please re-start the service with new code and then press Enter.') 37 | 38 | if __name__ == '__main__': 39 | if len(sys.argv) != 3: 40 | sys.exit('usage: upgrade_from_0.2_to_0.3.py info-database-file repository-name') 41 | main(*sys.argv[1:]) 42 | -------------------------------------------------------------------------------- /scripts/upgrade_from_0.3_to_0.4.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # vim:fileencoding=utf-8 3 | 4 | import os, sys 5 | import sqlite3 6 | import pickle 7 | import logging 8 | 9 | from archrepo2.lib.nicelogger import enable_pretty_logging 10 | enable_pretty_logging(logging.DEBUG) 11 | 12 | from archrepo2.dbutil import * 13 | 14 | def main(dbname): 15 | db = sqlite3.connect(dbname, isolation_level=None) 16 | if getver(db) != '0.3': 17 | raise Exception('wrong database version') 18 | 19 | base_dir = os.path.dirname(dbname) 20 | input('Please stop the service and then press Enter.') 21 | 22 | p = db.execute('select filename from sigfiles limit 1').fetchone()[0] 23 | newp = os.path.relpath(p, start=base_dir) 24 | suffix_len = len(os.path.commonprefix((newp[::-1], p[::-1]))) 25 | old_prefix = p[:-suffix_len] 26 | new_prefix = newp[:-suffix_len] 27 | db.execute(''' 28 | UPDATE OR REPLACE sigfiles 29 | SET filename = REPLACE(filename, ?, ?) 30 | ''', (old_prefix, new_prefix)) 31 | db.execute(''' 32 | UPDATE OR REPLACE pkginfo 33 | SET filename = REPLACE(filename, ?, ?) 34 | ''', (old_prefix, new_prefix)) 35 | 36 | setver(db, '0.4') 37 | db.close() 38 | 39 | input('Please re-start the service with new code and then press Enter.') 40 | 41 | if __name__ == '__main__': 42 | if len(sys.argv) != 2: 43 | sys.exit('usage: upgrade_from_0.3_to_0.4.py info-database-file') 44 | main(*sys.argv[1:]) 45 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from setuptools import setup, find_packages 4 | import archrepo2 5 | 6 | setup( 7 | name = 'archrepo2', 8 | version = archrepo2.__version__, 9 | packages = find_packages(), 10 | install_requires = ['tornado>2.4.1', 'pyinotify', 'pyalpm'], 11 | entry_points = { 12 | 'console_scripts': [ 13 | 'archreposrv = archrepo2.archreposrv:main', 14 | ], 15 | }, 16 | 17 | author = 'lilydjwg', 18 | author_email = 'lilydjwg@gmail.com', 19 | description = 'Arch Linux repository manager', 20 | license = 'MIT', 21 | keywords = 'archlinux linux', 22 | url = 'https://github.com/lilydjwg/archrepo2', 23 | ) 24 | -------------------------------------------------------------------------------- /test/instructions.act: -------------------------------------------------------------------------------- 1 | base_dir: /var/cache/pacman/pkg 2 | # wait for server to start 3 | wait 4 | 5 | # x86_64 packages 6 | add: any zip-3.0-3-x86_64.pkg.tar.xz 7 | wait 8 | checky: x86_64/zip-3.0-3-x86_64.pkg.tar.xz 9 | checkp: x86_64 zip=3.0-3 10 | 11 | # 'any' package 12 | add: x86_64 youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 13 | wait 14 | checky: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 15 | checky: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 16 | checky: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 17 | checkp: x86_64 youtube-dl=2014.01.28.1-1 18 | checkp: i686 youtube-dl=2014.01.28.1-1 19 | 20 | # update a package 21 | add: any youtube-dl-2014.01.29-1-any.pkg.tar.xz 22 | wait 23 | checky: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz 24 | checky: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz 25 | checky: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz 26 | checkp: x86_64 youtube-dl=2014.01.29-1 27 | checkp: i686 youtube-dl=2014.01.29-1 28 | 29 | # downgrade and remove 30 | add: x86_64 youtube-dl-2014.01.29-1-any.pkg.tar.xz 31 | remove: any youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 32 | wait 33 | checkn: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 34 | checkn: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 35 | checkn: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz 36 | checkp: x86_64 youtube-dl=2014.01.29-1 37 | checkp: i686 youtube-dl=2014.01.29-1 38 | 39 | # completely remove packages 40 | remove: any youtube-dl-2014.01.29-1-any.pkg.tar.xz 41 | remove: x86_64 zip-3.0-3-x86_64.pkg.tar.xz 42 | wait 43 | checkn: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz 44 | checkn: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz 45 | checkn: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz 46 | checkp: x86_64 youtube-dl=null 47 | checkp: i686 youtube-dl=null 48 | checkp: any youtube-dl=null 49 | checkp: i686 zip=null 50 | checkp: x86_64 zip=null 51 | 52 | # add then, while adding, remove it 53 | # look at the log carefully! 54 | add: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz 55 | racing-wait 56 | remove: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz 57 | wait 58 | checkn: x86_64/linux-3.12.8-1-x86_64.pkg.tar.xz 59 | checkp: x86_64 linux=null 60 | -------------------------------------------------------------------------------- /test/repomon_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import sys 5 | import time 6 | import configparser 7 | import subprocess 8 | import logging 9 | import shutil 10 | import tarfile 11 | 12 | from myutils import enable_pretty_logging 13 | enable_pretty_logging(logging.DEBUG) 14 | 15 | class Command: 16 | def __init__(self, ctx, args): 17 | self.args = args 18 | self.ctx = ctx 19 | self.run() 20 | 21 | class WaitCommand(Command): 22 | cmd = 'wait' 23 | def run(self): 24 | t = self.ctx['wait_time'] + 2 25 | logging.info('waiting for %d seconds...', t) 26 | time.sleep(t) 27 | 28 | class RacingWaitCommand(Command): 29 | cmd = 'racing-wait' 30 | def run(self): 31 | t = self.ctx['wait_time'] + 0.3 32 | logging.info('Racing-waiting for %s seconds...', t) 33 | time.sleep(t) 34 | 35 | class BaseDirCommand(Command): 36 | cmd = 'base_dir' 37 | def run(self): 38 | base_dir = self.args[0] 39 | logging.info('base_dir set to %s.', base_dir) 40 | self.ctx['base_dir'] = base_dir 41 | 42 | class AddCommand(Command): 43 | cmd = 'add' 44 | def run(self): 45 | arch, file = self.args 46 | srcfile = os.path.join(self.ctx['base_dir'], file) 47 | dstfile = os.path.join(self.ctx['repo_dir'], arch, file) 48 | logging.info('adding file %s', file) 49 | shutil.copyfile(srcfile, dstfile) 50 | 51 | class RemoveCommand(Command): 52 | cmd = 'remove' 53 | def run(self): 54 | arch, file = self.args 55 | file = arch + '/' + file 56 | dstfile = os.path.join(self.ctx['repo_dir'], file) 57 | os.unlink(dstfile) 58 | logging.info('removing file %s', file) 59 | 60 | class CheckYCommand(Command): 61 | cmd = 'checky' 62 | def run(self): 63 | f = os.path.join(self.ctx['repo_dir'], self.args[0]) 64 | r = os.path.isfile(f) 65 | if not r: 66 | logging.error('checky assertion failed: %s is not a file.', f) 67 | 68 | class CheckNCommand(Command): 69 | cmd = 'checkn' 70 | def run(self): 71 | f = os.path.join(self.ctx['repo_dir'], self.args[0]) 72 | r = os.path.exists(f) 73 | if r: 74 | logging.error('checkn assertion failed: %s exists.', f) 75 | 76 | class CheckPCommand(Command): 77 | cmd = 'checkp' 78 | def run(self): 79 | arch, what = self.args 80 | dbfile = os.path.join(self.ctx['repo_dir'], arch, self.ctx['repo_name'] + '.db') 81 | db = tarfile.open(dbfile) 82 | name, ver = what.split('=', 1) 83 | if ver == 'null': 84 | pkg = [x for x in db.getnames() if '/' not in x and x.startswith(name+'-') and x[len(name)+1:].count('-') != 1] 85 | if pkg: 86 | logging.error('checkp assertion failed: package %s still exists in database: %r', name, pkg) 87 | else: 88 | try: 89 | db.getmember('%s-%s' % (name, ver)) 90 | except KeyError: 91 | logging.error('checkp assertion failed: package %s does not exist in database.', what) 92 | db.close() 93 | 94 | def build_command_map(cmdcls=Command, cmdmap={}): 95 | for cls in cmdcls.__subclasses__(): 96 | cmdmap[cls.cmd] = cls 97 | build_command_map(cls) 98 | return cmdmap 99 | 100 | def build_action_ctx(conf): 101 | ctx = {} 102 | ctx['repo_dir'] = conf.get('path') 103 | ctx['repo_name'] = conf.get('name') 104 | ctx['wait_time'] = conf.getint('wait-time', 10) 105 | ctx['base_dir'] = '' 106 | return ctx 107 | 108 | def run_action_file(conf, actlines): 109 | cmdmap = build_command_map() 110 | ctx = build_action_ctx(conf) 111 | for l in actlines: 112 | l = l.rstrip() 113 | if not l or l.startswith('#'): 114 | continue 115 | cmd, *args = l.split() 116 | cmd = cmd.rstrip(':') 117 | try: 118 | cmdmap[cmd](ctx, args) 119 | except KeyboardInterrupt: 120 | logging.info('Interrupted.') 121 | break 122 | except: 123 | logging.error('error running action: %s', l, exc_info=True) 124 | logging.info('done running action file.') 125 | 126 | class Server: 127 | def __init__(self, conffile): 128 | self.conffile = conffile 129 | 130 | def start(self): 131 | logging.info('starting server...') 132 | self.p = subprocess.Popen(['archreposrv', self.conffile]) 133 | 134 | def stop(self): 135 | logging.info('quitting server...') 136 | p = self.p 137 | p.send_signal(2) 138 | ret = p.wait() 139 | if ret == 0: 140 | logging.info('server exited normally.') 141 | else: 142 | logging.error('server exited with error code %d.' % ret) 143 | 144 | def main(conffile, actfile): 145 | config = configparser.ConfigParser() 146 | config.read(conffile) 147 | 148 | dest_dir = config['repository'].get('path') 149 | if os.path.isdir(dest_dir): 150 | ans = input('Repository directory for testing exists. Removing? [y/N] ') 151 | if ans not in 'Yy': 152 | logging.warn('user cancelled.') 153 | sys.exit(1) 154 | else: 155 | logging.info('removing already existing testing repo...') 156 | shutil.rmtree(dest_dir) 157 | os.mkdir(dest_dir) 158 | for d in ('any', 'i686', 'x86_64'): 159 | p = os.path.join(dest_dir, d) 160 | os.mkdir(p) 161 | 162 | server = Server(conffile) 163 | server.start() 164 | 165 | with open(actfile) as acts: 166 | run_action_file(config['repository'], acts) 167 | 168 | server.stop() 169 | logging.info('removing testing repo...') 170 | shutil.rmtree(dest_dir) 171 | 172 | if __name__ == '__main__': 173 | if len(sys.argv) != 3: 174 | sys.exit('usage: %s repo_config test_action' % os.path.split(sys.argv[0])[-1]) 175 | main(sys.argv[1], sys.argv[2]) 176 | --------------------------------------------------------------------------------