├── src └── mr │ ├── developer │ ├── tests │ │ ├── __init__.py │ │ ├── test_cvs.py │ │ ├── conftest.py │ │ ├── test_svn.py │ │ ├── test_common.py │ │ ├── test_mercurial.py │ │ ├── test_commands.py │ │ ├── utils.py │ │ ├── test_git.py │ │ ├── test_extension.py │ │ └── test_git_submodules.py │ ├── __init__.py │ ├── compat.py │ ├── filesystem.py │ ├── gitsvn.py │ ├── bazaar.py │ ├── develop.py │ ├── darcs.py │ ├── cvs.py │ ├── mercurial.py │ ├── extension.py │ ├── git.py │ ├── svn.py │ └── common.py │ └── __init__.py ├── MANIFEST.in ├── pyproject.toml ├── setup.cfg ├── .gitignore ├── tox.ini ├── setup.py ├── HELP.rst └── README.rst /src/mr/developer/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.rst 2 | include *.ini 3 | prune mr.developer.addon 4 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | # We still support older Pythons, and that means it is better to use an 3 | # older setuptools to create the distributions. 4 | requires = ["setuptools<69"] 5 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_cvs.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import doctest 3 | import mr.developer.cvs 4 | 5 | 6 | def test_suite(): 7 | return unittest.TestSuite([doctest.DocTestSuite(mr.developer.cvs)]) 8 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [zest.releaser] 5 | version-levels = 2 6 | 7 | [devpi:upload] 8 | formats = sdist.tgz,bdist_wheel 9 | 10 | [check-manifest] 11 | ignore = 12 | build_git.sh 13 | buildout.cfg 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[co] 2 | *.swp 3 | *.egg-info 4 | /.cache 5 | /.coverage 6 | /.mr.developer.cfg 7 | /.tox/ 8 | /coverage.xml 9 | /bin/* 10 | /build/* 11 | /develop-eggs/* 12 | /dist/* 13 | /eggs/* 14 | /etc 15 | /htmlcov/ 16 | /include/ 17 | /.installed.cfg 18 | /lib/ 19 | /parts/ -------------------------------------------------------------------------------- /src/mr/__init__.py: -------------------------------------------------------------------------------- 1 | # See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages 2 | try: 3 | __import__('pkg_resources').declare_namespace(__name__) 4 | except ImportError: 5 | from pkgutil import extend_path 6 | __path__ = extend_path(__path__, __name__) 7 | -------------------------------------------------------------------------------- /src/mr/developer/__init__.py: -------------------------------------------------------------------------------- 1 | # See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages 2 | try: 3 | __import__('pkg_resources').declare_namespace(__name__) 4 | except ImportError: 5 | from pkgutil import extend_path 6 | __path__ = extend_path(__path__, __name__) 7 | 
-------------------------------------------------------------------------------- /src/mr/developer/compat.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | if sys.version_info < (3, 0): 5 | def b(x): 6 | return x 7 | 8 | def s(x): 9 | return x 10 | 11 | else: 12 | def b(x): 13 | return bytes(x, "utf-8") 14 | 15 | def s(x): 16 | return str(x, "utf-8") 17 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27,py27-configparser,py34,py35,py36,py37,py38,py39,py310,py311,py312,py313,flake8 3 | 4 | 5 | [base] 6 | deps = 7 | mock 8 | pytest 9 | 10 | 11 | [testenv] 12 | commands = py.test --cov {envsitepackagesdir}/mr/developer --cov-report=term --cov-report=html --cov-report=xml {envsitepackagesdir}/mr/developer {posargs} 13 | deps = 14 | {[base]deps} 15 | pytest-cov 16 | configparser: configparser 17 | 18 | 19 | [testenv:flake8] 20 | commands = flake8 --ignore E501 setup.py src 21 | deps = flake8 22 | skip_install = true 23 | 24 | 25 | [pytest] 26 | addopts = 27 | -r a 28 | --tb=native 29 | --doctest-modules 30 | --strict 31 | -W once::DeprecationWarning 32 | testpaths = src/mr/ 33 | -------------------------------------------------------------------------------- /src/mr/developer/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import shutil 4 | import tempfile 5 | 6 | 7 | class Path(str): 8 | def __getitem__(self, name): 9 | return Path(os.path.join(self, name)) 10 | 11 | def create_file(self, *content): 12 | f = open(self, 'w') 13 | f.write('\n'.join(content)) 14 | f.close() 15 | 16 | 17 | @pytest.fixture 18 | def src(tempdir): 19 | base = tempdir['src'] 20 | os.mkdir(base) 21 | return base 22 | 23 | 24 | @pytest.fixture 25 | def tempdir(): 26 | cwd = os.getcwd() 27 | tempdir = os.path.realpath(tempfile.mkdtemp()) 28 | try: 29 | os.chdir(tempdir) 30 | try: 31 | yield Path(tempdir) 32 | finally: 33 | os.chdir(cwd) 34 | finally: 35 | shutil.rmtree(tempdir) 36 | 37 | 38 | @pytest.fixture 39 | def mkgitrepo(tempdir): 40 | from mr.developer.tests.utils import GitRepo 41 | 42 | def mkgitrepo(name): 43 | repository = GitRepo(tempdir[name]) 44 | repository.init() 45 | repository.setup_user() 46 | return repository 47 | 48 | return mkgitrepo 49 | 50 | 51 | @pytest.fixture 52 | def develop(src): 53 | from mr.developer.tests.utils import MockDevelop 54 | develop = MockDevelop() 55 | develop.sources_dir = src 56 | return develop 57 | 58 | 59 | @pytest.fixture(autouse=True) 60 | def _patch_git_working_copy_for_tests(monkeypatch): 61 | from mr.developer.git import GitWorkingCopy 62 | monkeypatch.setattr(GitWorkingCopy, "_always_allow_file_protocol", True) 63 | -------------------------------------------------------------------------------- /src/mr/developer/filesystem.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | import os 3 | 4 | logger = common.logger 5 | 6 | 7 | class FilesystemError(common.WCError): 8 | pass 9 | 10 | 11 | class FilesystemWorkingCopy(common.BaseWorkingCopy): 12 | def checkout(self, **kwargs): 13 | name = self.source['name'] 14 | path = self.source['path'] 15 | if os.path.exists(path): 16 | if self.matches(): 17 | self.output((logger.info, 'Filesystem package %r doesn\'t need a checkout.' 
% name)) 18 | else: 19 | raise FilesystemError( 20 | 'Directory name for existing package %r differs. ' 21 | 'Expected %r.' % (name, self.source['url'])) 22 | else: 23 | raise FilesystemError( 24 | "Directory %r for package %r doesn't exist. " 25 | "Check in the documentation if you need to add/change a 'sources-dir' option in " 26 | "your [buildout] section or a 'path' option in [sources]." % (path, name)) 27 | return '' 28 | 29 | def matches(self): 30 | return os.path.split(self.source['path'])[1] == self.source['url'] 31 | 32 | def status(self, **kwargs): 33 | if kwargs.get('verbose', False): 34 | return 'clean', '' 35 | return 'clean' 36 | 37 | def update(self, **kwargs): 38 | name = self.source['name'] 39 | if not self.matches(): 40 | raise FilesystemError( 41 | 'Directory name for existing package %r differs. ' 42 | 'Expected %r.' % (name, self.source['url'])) 43 | self.output((logger.info, 'Filesystem package %r doesn\'t need update.' % name)) 44 | return '' 45 | -------------------------------------------------------------------------------- /src/mr/developer/gitsvn.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | from mr.developer.svn import SVNWorkingCopy 3 | import subprocess 4 | 5 | 6 | logger = common.logger 7 | 8 | 9 | class GitSVNError(common.WCError): 10 | pass 11 | 12 | 13 | class GitSVNWorkingCopy(SVNWorkingCopy): 14 | 15 | def __init__(self, source): 16 | super(GitSVNWorkingCopy, self).__init__(source) 17 | self.gitify_executable = common.which('gitify') 18 | 19 | def gitify_init(self, **kwargs): 20 | name = self.source['name'] 21 | path = self.source['path'] 22 | self.output((logger.info, "Gitified '%s'." % name)) 23 | cmd = subprocess.Popen( 24 | [self.gitify_executable, "init"], 25 | cwd=path, 26 | stdout=subprocess.PIPE, 27 | stderr=subprocess.PIPE) 28 | stdout, stderr = cmd.communicate() 29 | if cmd.returncode != 0: 30 | raise GitSVNError("gitify init for '%s' failed.\n%s" % (name, stdout)) 31 | if kwargs.get('verbose', False): 32 | return stdout 33 | 34 | def svn_checkout(self, **kwargs): 35 | super(GitSVNWorkingCopy, self).svn_checkout(**kwargs) 36 | return self.gitify_init(**kwargs) 37 | 38 | def svn_switch(self, **kwargs): 39 | super(GitSVNWorkingCopy, self).svn_switch(**kwargs) 40 | return self.gitify_init(**kwargs) 41 | 42 | def svn_update(self, **kwargs): 43 | name = self.source['name'] 44 | path = self.source['path'] 45 | self.output((logger.info, "Updated '%s' with gitify." 
% name)) 46 | cmd = subprocess.Popen( 47 | [self.gitify_executable, "update"], 48 | cwd=path, 49 | stdout=subprocess.PIPE, 50 | stderr=subprocess.PIPE) 51 | stdout, stderr = cmd.communicate() 52 | if cmd.returncode != 0: 53 | raise GitSVNError("gitify update for '%s' failed.\n%s" % (name, stdout)) 54 | if kwargs.get('verbose', False): 55 | return stdout 56 | 57 | def status(self, **kwargs): 58 | svn_status = super(GitSVNWorkingCopy, self).status(**kwargs) 59 | if svn_status == 'clean': 60 | return common.get_workingcopytypes()['git']( 61 | self.source).status(**kwargs) 62 | else: 63 | if kwargs.get('verbose', False): 64 | return svn_status, '' 65 | return svn_status 66 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_svn.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | from mr.developer.extension import Source 3 | from mr.developer.tests.utils import Process 4 | import os 5 | import pytest 6 | 7 | 8 | class TestSVN: 9 | @pytest.fixture(autouse=True) 10 | def clear_svn_caches(self): 11 | from mr.developer.svn import SVNWorkingCopy 12 | SVNWorkingCopy._clear_caches() 13 | 14 | def testUpdateWithoutRevisionPin(self, develop, src, tempdir): 15 | from mr.developer.commands import CmdCheckout 16 | from mr.developer.commands import CmdUpdate 17 | process = Process() 18 | repository = tempdir['repository'] 19 | process.check_call("svnadmin create %s" % repository) 20 | checkout = tempdir['checkout'] 21 | process.check_call( 22 | "svn checkout file://%s %s" % (repository, checkout), 23 | echo=False) 24 | foo = checkout['foo'] 25 | foo.create_file('foo') 26 | process.check_call("svn add %s" % foo, echo=False) 27 | process.check_call("svn commit %s -m foo" % foo, echo=False) 28 | bar = checkout['bar'] 29 | bar.create_file('bar') 30 | process.check_call("svn add %s" % bar, echo=False) 31 | process.check_call("svn commit %s -m bar" % bar, echo=False) 32 | develop.sources = { 33 | 'egg': Source( 34 | kind='svn', 35 | name='egg', 36 | url='file://%s' % repository, 37 | path=src['egg'])} 38 | _log = patch('mr.developer.svn.logger') 39 | log = _log.__enter__() 40 | try: 41 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 42 | assert set(os.listdir(src['egg'])) == set(('.svn', 'bar', 'foo')) 43 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 44 | assert set(os.listdir(src['egg'])) == set(('.svn', 'bar', 'foo')) 45 | assert log.method_calls == [ 46 | ('info', ("Checked out 'egg' with subversion.",), {}), 47 | ('info', ("Updated 'egg' with subversion.",), {})] 48 | finally: 49 | _log.__exit__(None, None, None) 50 | 51 | def testUpdateWithRevisionPin(self, develop, src, tempdir): 52 | from mr.developer.commands import CmdCheckout 53 | from mr.developer.commands import CmdUpdate 54 | process = Process() 55 | repository = tempdir['repository'] 56 | process.check_call("svnadmin create %s" % repository) 57 | checkout = tempdir['checkout'] 58 | process.check_call( 59 | "svn checkout file://%s %s" % (repository, checkout), 60 | echo=False) 61 | foo = checkout['foo'] 62 | foo.create_file('foo') 63 | process.check_call("svn add %s" % foo, echo=False) 64 | process.check_call("svn commit %s -m foo" % foo, echo=False) 65 | bar = checkout['bar'] 66 | bar.create_file('bar') 67 | process.check_call("svn add %s" % bar, echo=False) 68 | process.check_call("svn commit %s -m bar" % bar, echo=False) 69 | develop.sources = { 70 | 'egg': Source( 71 | kind='svn', 72 | 
name='egg', 73 | url='file://%s@1' % repository, 74 | path=src['egg'])} 75 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 76 | assert set(os.listdir(src['egg'])) == set(('.svn', 'foo')) 77 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 78 | assert set(os.listdir(src['egg'])) == set(('.svn', 'foo')) 79 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | 4 | version = '2.0.5.dev0' 5 | 6 | 7 | install_requires = [ 8 | 'setuptools', 9 | 'zc.buildout', 10 | 'six', 11 | ] 12 | 13 | tests_require = [ 14 | 'mock'] 15 | 16 | extras_require = { 17 | 'test': tests_require} 18 | 19 | 20 | def get_text_from_file(fn): 21 | text = open(fn, 'rb').read() 22 | return text.decode('utf-8') 23 | 24 | 25 | setup(name='mr.developer', 26 | version=version, 27 | description="A zc.buildout extension to ease the development of large projects with lots of packages.", 28 | long_description="\n\n".join([ 29 | get_text_from_file("README.rst"), 30 | get_text_from_file("HELP.rst"), 31 | get_text_from_file("CHANGES.rst")]), 32 | # Get more strings from https://pypi.org/classifiers/ 33 | classifiers=[ 34 | "Development Status :: 5 - Production/Stable", 35 | "Programming Language :: Python", 36 | "Programming Language :: Python :: 2", 37 | "Programming Language :: Python :: 2.7", 38 | "Programming Language :: Python :: 3", 39 | "Programming Language :: Python :: 3.4", 40 | "Programming Language :: Python :: 3.5", 41 | "Programming Language :: Python :: 3.6", 42 | "Programming Language :: Python :: 3.7", 43 | "Programming Language :: Python :: 3.8", 44 | "Programming Language :: Python :: 3.9", 45 | "Programming Language :: Python :: 3.10", 46 | "Programming Language :: Python :: 3.11", 47 | "Programming Language :: Python :: 3.12", 48 | "Programming Language :: Python :: 3.13", 49 | "Framework :: Buildout", 50 | "Topic :: Software Development :: Libraries :: Python Modules"], 51 | keywords='buildout extension vcs git develop', 52 | author='Florian Schulze', 53 | author_email='florian.schulze@gmx.net', 54 | url='http://github.com/fschulze/mr.developer', 55 | license='BSD', 56 | packages=['mr', 'mr.developer', 'mr.developer.tests'], 57 | package_dir={'': 'src'}, 58 | namespace_packages=['mr', 'mr.developer'], 59 | include_package_data=True, 60 | zip_safe=False, 61 | install_requires=install_requires, 62 | tests_require=tests_require, 63 | extras_require=extras_require, 64 | python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", 65 | test_suite='mr.developer.tests', 66 | entry_points=""" 67 | [console_scripts] 68 | develop = mr.developer.develop:develop 69 | [zc.buildout.extension] 70 | default = mr.developer.extension:extension 71 | [mr.developer.workingcopytypes] 72 | svn = mr.developer.svn:SVNWorkingCopy 73 | git = mr.developer.git:GitWorkingCopy 74 | gitsvn = mr.developer.gitsvn:GitSVNWorkingCopy 75 | hg = mr.developer.mercurial:MercurialWorkingCopy 76 | bzr = mr.developer.bazaar:BazaarWorkingCopy 77 | fs = mr.developer.filesystem:FilesystemWorkingCopy 78 | cvs = mr.developer.cvs:CVSWorkingCopy 79 | darcs = mr.developer.darcs:DarcsWorkingCopy 80 | [mr.developer.commands] 81 | activate = mr.developer.commands:CmdActivate 82 | arguments = mr.developer.commands:CmdArguments 83 | checkout = mr.developer.commands:CmdCheckout 84 | deactivate = mr.developer.commands:CmdDeactivate 85 | help = mr.developer.commands:CmdHelp 86 | info 
= mr.developer.commands:CmdInfo 87 | list = mr.developer.commands:CmdList 88 | pony = mr.developer.commands:CmdPony 89 | purge = mr.developer.commands:CmdPurge 90 | rebuild = mr.developer.commands:CmdRebuild 91 | reset = mr.developer.commands:CmdReset 92 | status = mr.developer.commands:CmdStatus 93 | update = mr.developer.commands:CmdUpdate 94 | """) 95 | -------------------------------------------------------------------------------- /src/mr/developer/bazaar.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | import os 3 | import subprocess 4 | 5 | logger = common.logger 6 | 7 | 8 | class BazaarError(common.WCError): 9 | pass 10 | 11 | 12 | class BazaarWorkingCopy(common.BaseWorkingCopy): 13 | 14 | def __init__(self, source): 15 | super(BazaarWorkingCopy, self).__init__(source) 16 | self.bzr_executable = common.which('bzr') 17 | 18 | def bzr_branch(self, **kwargs): 19 | name = self.source['name'] 20 | path = self.source['path'] 21 | url = self.source['url'] 22 | if os.path.exists(path): 23 | self.output( 24 | (logger.info, 'Skipped branching existing package %r.' % name)) 25 | return 26 | self.output((logger.info, 'Branched %r with bazaar.' % name)) 27 | env = dict(os.environ) 28 | env.pop('PYTHONPATH', None) 29 | cmd = subprocess.Popen( 30 | [self.bzr_executable, 'branch', '--quiet', url, path], 31 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 32 | stdout, stderr = cmd.communicate() 33 | if cmd.returncode != 0: 34 | raise BazaarError( 35 | 'bzr branch for %r failed.\n%s' % (name, stderr)) 36 | if kwargs.get('verbose', False): 37 | return stdout 38 | 39 | def bzr_pull(self, **kwargs): 40 | name = self.source['name'] 41 | path = self.source['path'] 42 | url = self.source['url'] 43 | self.output((logger.info, 'Updated %r with bazaar.' % name)) 44 | env = dict(os.environ) 45 | env.pop('PYTHONPATH', None) 46 | cmd = subprocess.Popen( 47 | [self.bzr_executable, 'pull', url], cwd=path, 48 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 49 | stdout, stderr = cmd.communicate() 50 | if cmd.returncode != 0: 51 | raise BazaarError( 52 | 'bzr pull for %r failed.\n%s' % (name, stderr)) 53 | if kwargs.get('verbose', False): 54 | return stdout 55 | 56 | def checkout(self, **kwargs): 57 | name = self.source['name'] 58 | path = self.source['path'] 59 | update = self.should_update(**kwargs) 60 | if os.path.exists(path): 61 | if update: 62 | self.update(**kwargs) 63 | elif self.matches(): 64 | self.output( 65 | (logger.info, 'Skipped checkout of existing package %r.' % name)) 66 | else: 67 | raise BazaarError( 68 | 'Source URL for existing package %r differs. ' 69 | 'Expected %r.' 
% (name, self.source['url'])) 70 | else: 71 | return self.bzr_branch(**kwargs) 72 | 73 | def matches(self): 74 | name = self.source['name'] 75 | path = self.source['path'] 76 | env = dict(os.environ) 77 | env.pop('PYTHONPATH', None) 78 | cmd = subprocess.Popen( 79 | [self.bzr_executable, 'info'], cwd=path, 80 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 81 | stdout, stderr = cmd.communicate() 82 | if cmd.returncode != 0: 83 | raise BazaarError( 84 | 'bzr info for %r failed.\n%s' % (name, stderr)) 85 | return (self.source['url'] in stdout.split()) 86 | 87 | def status(self, **kwargs): 88 | path = self.source['path'] 89 | env = dict(os.environ) 90 | env.pop('PYTHONPATH', None) 91 | cmd = subprocess.Popen( 92 | [self.bzr_executable, 'status'], cwd=path, 93 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 94 | stdout, stderr = cmd.communicate() 95 | status = stdout and 'dirty' or 'clean' 96 | if kwargs.get('verbose', False): 97 | return status, stdout 98 | else: 99 | return status 100 | 101 | def update(self, **kwargs): 102 | name = self.source['name'] 103 | if not self.matches(): 104 | raise BazaarError( 105 | "Can't update package %r because its URL doesn't match." % 106 | name) 107 | if self.status() != 'clean' and not kwargs.get('force', False): 108 | raise BazaarError( 109 | "Can't update package %r because it's dirty." % name) 110 | return self.bzr_pull(**kwargs) 111 | -------------------------------------------------------------------------------- /src/mr/developer/develop.py: -------------------------------------------------------------------------------- 1 | from mr.developer.common import logger, Config, get_commands 2 | from mr.developer.commands import CmdHelp 3 | from mr.developer.extension import Extension 4 | from zc.buildout.buildout import Buildout 5 | import argparse 6 | import atexit 7 | import pkg_resources 8 | import logging 9 | import os 10 | import sys 11 | import textwrap 12 | 13 | 14 | def find_base(): 15 | path = os.getcwd() 16 | while path: 17 | if os.path.exists(os.path.join(path, '.mr.developer.cfg')): 18 | break 19 | old_path = path 20 | path = os.path.dirname(path) 21 | if old_path == path: 22 | path = None 23 | break 24 | if path is None: 25 | raise IOError(".mr.developer.cfg not found") 26 | 27 | return path 28 | 29 | 30 | class ArgumentParser(argparse.ArgumentParser): 31 | def _check_value(self, action, value): 32 | # converted value must be one of the choices (if specified) 33 | if action.choices is not None and value not in action.choices: 34 | tup = value, ', '.join([repr(x) for x in sorted(action.choices) if x != 'pony']) 35 | msg = argparse._('invalid choice: %r (choose from %s)') % tup 36 | raise argparse.ArgumentError(action, msg) 37 | 38 | 39 | class HelpFormatter(argparse.HelpFormatter): 40 | def _split_lines(self, text, width): 41 | return self._fill_text(text, width, "").split("\n") 42 | 43 | def _fill_text(self, text, width, indent): 44 | result = [] 45 | for line in text.split("\n"): 46 | for line2 in textwrap.fill(line, width).split("\n"): 47 | result.append("%s%s" % (indent, line2)) 48 | return "\n".join(result) 49 | 50 | 51 | class Develop(object): 52 | def __call__(self, *args, **kwargs): 53 | logger.setLevel(logging.INFO) 54 | ch = logging.StreamHandler() 55 | ch.setFormatter(logging.Formatter("%(levelname)s: %(message)s")) 56 | logger.addHandler(ch) 57 | self.parser = ArgumentParser() 58 | version = pkg_resources.get_distribution("mr.developer").version 59 | self.parser.add_argument('-v', '--version', 60 | 
action='version', 61 | version='mr.developer %s' % version) 62 | self.parsers = self.parser.add_subparsers(title="commands", metavar="") 63 | 64 | for command in get_commands(): 65 | command(self) 66 | 67 | if not args: 68 | args = None 69 | args = self.parser.parse_args(args) 70 | 71 | try: 72 | self.buildout_dir = find_base() 73 | except IOError: 74 | if isinstance(args.func, CmdHelp): 75 | args.func(args) 76 | return 77 | self.parser.print_help() 78 | print 79 | logger.error("You are not in a path which has mr.developer installed (%s)." % sys.exc_info()[1]) 80 | return 81 | 82 | self.config = Config(self.buildout_dir) 83 | self.original_dir = os.getcwd() 84 | atexit.register(self.restore_original_dir) 85 | os.chdir(self.buildout_dir) 86 | buildout = Buildout(self.config.buildout_settings['config_file'], 87 | self.config.buildout_options, 88 | self.config.buildout_settings['user_defaults'], 89 | self.config.buildout_settings['windows_restart']) 90 | root_logger = logging.getLogger() 91 | root_logger.handlers = [] 92 | root_logger.setLevel(logging.INFO) 93 | extension = Extension(buildout) 94 | self.sources = extension.get_sources() 95 | self.sources_dir = extension.get_sources_dir() 96 | self.auto_checkout = extension.get_auto_checkout() 97 | self.always_checkout = extension.get_always_checkout() 98 | self.update_git_submodules = extension.get_update_git_submodules() 99 | self.always_accept_server_certificate = extension.get_always_accept_server_certificate() 100 | develop, self.develeggs, versions = extension.get_develop_info() 101 | self.threads = extension.get_threads() 102 | 103 | args.func(args) 104 | 105 | def restore_original_dir(self): 106 | if os.path.exists(self.original_dir): 107 | os.chdir(self.original_dir) 108 | 109 | 110 | develop = Develop() 111 | -------------------------------------------------------------------------------- /src/mr/developer/darcs.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | import os 3 | import subprocess 4 | 5 | 6 | logger = common.logger 7 | 8 | 9 | class DarcsError(common.WCError): 10 | pass 11 | 12 | 13 | class DarcsWorkingCopy(common.BaseWorkingCopy): 14 | 15 | def __init__(self, source): 16 | super(DarcsWorkingCopy, self).__init__(source) 17 | self.darcs_executable = common.which('darcs') 18 | 19 | def darcs_checkout(self, **kwargs): 20 | name = self.source['name'] 21 | path = self.source['path'] 22 | url = self.source['url'] 23 | if os.path.exists(path): 24 | self.output((logger.info, "Skipped getting of existing package '%s'." % name)) 25 | return 26 | self.output((logger.info, "Getting '%s' with darcs." % name)) 27 | cmd = [self.darcs_executable, "get", "--quiet", "--lazy", url, path] 28 | cmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 29 | stdout, stderr = cmd.communicate() 30 | if cmd.returncode != 0: 31 | raise DarcsError("darcs get for '%s' failed.\n%s" % (name, stderr)) 32 | if kwargs.get('verbose', False): 33 | return stdout 34 | 35 | def darcs_update(self, **kwargs): 36 | name = self.source['name'] 37 | path = self.source['path'] 38 | self.output((logger.info, "Updating '%s' with darcs." 
% name)) 39 | cmd = subprocess.Popen([self.darcs_executable, "pull", "-a"], 40 | cwd=path, 41 | stdout=subprocess.PIPE, 42 | stderr=subprocess.PIPE) 43 | stdout, stderr = cmd.communicate() 44 | if cmd.returncode != 0: 45 | raise DarcsError("darcs pull for '%s' failed.\n%s" % (name, stderr)) 46 | if kwargs.get('verbose', False): 47 | return stdout 48 | 49 | def checkout(self, **kwargs): 50 | name = self.source['name'] 51 | path = self.source['path'] 52 | update = self.should_update(**kwargs) 53 | if os.path.exists(path): 54 | if update: 55 | self.update(**kwargs) 56 | elif self.matches(): 57 | self.output((logger.info, "Skipped checkout of existing package '%s'." % name)) 58 | else: 59 | raise DarcsError("Checkout URL for existing package '%s' differs. Expected '%s'." % (name, self.source['url'])) 60 | else: 61 | return self.darcs_checkout(**kwargs) 62 | 63 | def _darcs_related_repositories(self): 64 | name = self.source['name'] 65 | path = self.source['path'] 66 | repos = os.path.join(path, '_darcs', 'prefs', 'repos') 67 | if os.path.exists(repos): 68 | for line in open(repos).readlines(): 69 | yield line.strip() 70 | else: 71 | cmd = subprocess.Popen([self.darcs_executable, "show", "repo"], 72 | cwd=path, 73 | stdout=subprocess.PIPE, 74 | stderr=subprocess.PIPE) 75 | stdout, stderr = cmd.communicate() 76 | if cmd.returncode != 0: 77 | self.output((logger.error, "darcs info for '%s' failed.\n%s" % (name, stderr))) 78 | return 79 | 80 | lines = stdout.splitlines() 81 | for line in lines: 82 | k, v = line.split(':', 1) 83 | k = k.strip() 84 | v = v.strip() 85 | if k == 'Default Remote': 86 | yield v 87 | elif k == 'Cache': 88 | for cache in v.split(', '): 89 | if cache.startswith('repo:'): 90 | yield cache[5:] 91 | 92 | def matches(self): 93 | return self.source['url'] in self._darcs_related_repositories() 94 | 95 | def status(self, **kwargs): 96 | path = self.source['path'] 97 | cmd = subprocess.Popen([self.darcs_executable, "whatsnew"], 98 | cwd=path, 99 | stdout=subprocess.PIPE, 100 | stderr=subprocess.PIPE) 101 | stdout, stderr = cmd.communicate() 102 | lines = stdout.strip().split('\n') 103 | if 'No changes' in lines[-1]: 104 | status = 'clean' 105 | else: 106 | status = 'dirty' 107 | if kwargs.get('verbose', False): 108 | return status, stdout 109 | else: 110 | return status 111 | 112 | def update(self, **kwargs): 113 | name = self.source['name'] 114 | if not self.matches(): 115 | raise DarcsError("Can't update package '%s' because its URL doesn't match." % name) 116 | if self.status() != 'clean' and not kwargs.get('force', False): 117 | raise DarcsError("Can't update package '%s' because it's dirty."
% name) 118 | return self.darcs_update(**kwargs) 119 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_common.py: -------------------------------------------------------------------------------- 1 | from mr.developer.common import Config, Rewrite 2 | from mr.developer.common import get_commands, parse_buildout_args, version_sorted 3 | import pytest 4 | 5 | 6 | def test_find_internal_commands(): 7 | cmds = [x.__name__ for x in get_commands()] 8 | assert 'CmdActivate' in cmds 9 | assert 'CmdDeactivate' in cmds 10 | assert 'CmdHelp' in cmds 11 | 12 | 13 | class TestParseBuildoutArgs: 14 | def checkOptions(self, options): 15 | for option in options: 16 | assert len(option) == 3 17 | 18 | def testTimeoutValue(self): 19 | options, settings, args = parse_buildout_args(['-t', '5']) 20 | self.checkOptions(options) 21 | 22 | def testCommands(self): 23 | options, settings, args = parse_buildout_args(['-t', '5']) 24 | assert len(args) == 0 25 | options, settings, args = parse_buildout_args(['-t', '5', 'install', 'partname']) 26 | assert len(args) == 2 27 | 28 | def testAssignments(self): 29 | # You can override parameters from buildout sections on the command line. 30 | options, settings, args = parse_buildout_args(['versions:foo=42']) 31 | self.checkOptions(options) 32 | assert options[0] == ('versions', 'foo', '42') 33 | assert len(args) == 0 34 | # Without a colon in it, zc.buildout itself defaults to the 35 | # 'buildout' section. Issue 151. 36 | options, settings, args = parse_buildout_args(['foo=42']) 37 | self.checkOptions(options) 38 | assert options[0] == ('buildout', 'foo', '42') 39 | assert len(args) == 0 40 | 41 | 42 | def test_buildout_args_key_is_str(tempdir): 43 | config = Config('.') 44 | config_file = tempdir['config.cfg'] 45 | config_file.create_file( 46 | "[buildout]", 47 | "args = './bin/buildout'", 48 | " '-c'", 49 | " 'buildout.cfg'") 50 | read_config = config.read_config(config_file) 51 | assert type(read_config.get('buildout', 'args')) == str 52 | 53 | 54 | class TestRewrites: 55 | def testMissingSubstitute(self): 56 | pytest.raises(ValueError, Rewrite, ("url ~ foo")) 57 | 58 | def testInvalidOptions(self): 59 | pytest.raises(ValueError, Rewrite, ("name ~ foo\nbar")) 60 | pytest.raises(ValueError, Rewrite, ("path ~ foo\nbar")) 61 | 62 | def testPartialSubstitute(self): 63 | rewrite = Rewrite("url ~ fschulze(/mr.developer.git)\nme\\1") 64 | source = dict(url="https://github.com/fschulze/mr.developer.git") 65 | rewrite(source) 66 | assert source['url'] == "https://github.com/me/mr.developer.git" 67 | 68 | def testExactMatch(self): 69 | rewrite = Rewrite("url ~ fschulze(/mr.developer.git)\nme\\1\nkind = git") 70 | sources = [ 71 | dict(url="https://github.com/fschulze/mr.developer.git", kind='git'), 72 | dict(url="https://github.com/fschulze/mr.developer.git", kind='gitsvn'), 73 | dict(url="https://github.com/fschulze/mr.developer.git", kind='svn')] 74 | for source in sources: 75 | rewrite(source) 76 | assert sources[0]['url'] == "https://github.com/me/mr.developer.git" 77 | assert sources[1]['url'] == "https://github.com/fschulze/mr.developer.git" 78 | assert sources[2]['url'] == "https://github.com/fschulze/mr.developer.git" 79 | 80 | def testRegexpMatch(self): 81 | rewrite = Rewrite("url ~ fschulze(/mr.developer.git)\nme\\1\nkind ~= git") 82 | sources = [ 83 | dict(url="https://github.com/fschulze/mr.developer.git", kind='git'), 84 | dict(url="https://github.com/fschulze/mr.developer.git", kind='gitsvn'), 85 | 
dict(url="https://github.com/fschulze/mr.developer.git", kind='svn')] 86 | for source in sources: 87 | rewrite(source) 88 | assert sources[0]['url'] == "https://github.com/me/mr.developer.git" 89 | assert sources[1]['url'] == "https://github.com/me/mr.developer.git" 90 | assert sources[2]['url'] == "https://github.com/fschulze/mr.developer.git" 91 | 92 | def testRegexpMatchAndSubstitute(self): 93 | rewrite = Rewrite("url ~ fschulze(/mr.developer.git)\nme\\1\nurl ~= ^http:") 94 | sources = [ 95 | dict(url="http://github.com/fschulze/mr.developer.git"), 96 | dict(url="https://github.com/fschulze/mr.developer.git"), 97 | dict(url="https://github.com/fschulze/mr.developer.git")] 98 | for source in sources: 99 | rewrite(source) 100 | assert sources[0]['url'] == "http://github.com/me/mr.developer.git" 101 | assert sources[1]['url'] == "https://github.com/fschulze/mr.developer.git" 102 | assert sources[2]['url'] == "https://github.com/fschulze/mr.developer.git" 103 | 104 | 105 | def test_version_sorted(): 106 | expected = [ 107 | 'version-1-0-1', 108 | 'version-1-0-2', 109 | 'version-1-0-10'] 110 | actual = version_sorted([ 111 | 'version-1-0-10', 112 | 'version-1-0-2', 113 | 'version-1-0-1']) 114 | assert expected == actual 115 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_mercurial.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from mock import patch 5 | 6 | from mr.developer.extension import Source 7 | from mr.developer.tests.utils import Process 8 | from mr.developer.compat import b 9 | 10 | 11 | class TestMercurial: 12 | def testUpdateWithoutRevisionPin(self, develop, src, tempdir): 13 | from mr.developer.commands import CmdCheckout 14 | from mr.developer.commands import CmdUpdate 15 | repository = tempdir['repository'] 16 | os.mkdir(repository) 17 | process = Process(cwd=repository) 18 | process.check_call("hg init %s" % repository) 19 | 20 | foo = repository['foo'] 21 | foo.create_file('foo') 22 | process.check_call("hg add %s" % foo, echo=False) 23 | process.check_call("hg commit %s -m foo -u test" % foo, echo=False) 24 | bar = repository['bar'] 25 | bar.create_file('bar') 26 | process.check_call("hg add %s" % bar, echo=False) 27 | process.check_call("hg commit %s -m bar -u test" % bar, echo=False) 28 | develop.sources = { 29 | 'egg': Source( 30 | kind='hg', 31 | name='egg', 32 | url='%s' % repository, 33 | path=os.path.join(src, 'egg'))} 34 | _log = patch('mr.developer.mercurial.logger') 35 | log = _log.__enter__() 36 | try: 37 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 38 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'bar', 'foo')) 39 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 40 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'bar', 'foo')) 41 | assert log.method_calls == [ 42 | ('info', ("Cloned 'egg' with mercurial.",), {}), 43 | ('info', ("Updated 'egg' with mercurial.",), {}), 44 | ('info', ("Switched 'egg' to default.",), {})] 45 | finally: 46 | _log.__exit__(None, None, None) 47 | 48 | def testUpdateWithRevisionPin(self, develop, src, tempdir): 49 | from mr.developer.commands import CmdCheckout 50 | from mr.developer.commands import CmdUpdate 51 | repository = tempdir['repository'] 52 | os.mkdir(repository) 53 | process = Process(cwd=repository) 54 | lines = process.check_call("hg init %s" % repository) 55 | foo = repository['foo'] 56 | foo.create_file('foo') 
57 | lines = process.check_call("hg add %s" % foo, echo=False) 58 | 59 | # create branch for testing 60 | lines = process.check_call("hg branch test", echo=False) 61 | 62 | lines = process.check_call("hg commit %s -m foo -u test" % foo, echo=False) 63 | 64 | # get comitted rev 65 | lines = process.check_call("hg log %s" % foo, echo=False) 66 | 67 | try: 68 | # XXX older version 69 | rev = lines[0].split()[1].split(b(':'))[1] 70 | except Exception: 71 | rev = lines[0].split()[1] 72 | 73 | # return to default branch 74 | lines = process.check_call("hg branch default", echo=False) 75 | 76 | bar = repository['bar'] 77 | bar.create_file('bar') 78 | lines = process.check_call("hg add %s" % bar, echo=False) 79 | lines = process.check_call("hg commit %s -m bar -u test" % bar, echo=False) 80 | 81 | # check rev 82 | develop.sources = { 83 | 'egg': Source( 84 | kind='hg', 85 | name='egg', 86 | rev=rev, 87 | url='%s' % repository, 88 | path=os.path.join(src, 'egg'))} 89 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 90 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'foo')) 91 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 92 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'foo')) 93 | 94 | # check branch 95 | develop.sources = { 96 | 'egg': Source( 97 | kind='hg', 98 | name='egg', 99 | branch='test', 100 | url='%s' % repository, 101 | path=os.path.join(src, 'egg'))} 102 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 103 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'foo')) 104 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 105 | assert set(os.listdir(os.path.join(src, 'egg'))) == set(('.hg', 'foo')) 106 | 107 | # we can't use both rev and branch 108 | with pytest.raises(SystemExit): 109 | develop.sources = { 110 | 'egg': Source( 111 | kind='hg', 112 | name='egg', 113 | branch='test', 114 | rev=rev, 115 | url='%s' % repository, 116 | path=os.path.join(src, 'egg-failed'))} 117 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 118 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_commands.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | import pytest 3 | 4 | 5 | class MockSource(dict): 6 | def exists(self): 7 | return getattr(self, '_exists', True) 8 | 9 | 10 | class TestCommand: 11 | @pytest.fixture 12 | def command(self, develop): 13 | from mr.developer.commands import Command 14 | develop.sources = ['foo', 'bar', 'baz', 'ham'] 15 | develop.auto_checkout = set(['foo', 'ham']) 16 | return Command(develop) 17 | 18 | def testEmptyMatchList(self, command): 19 | pkgs = command.get_packages([]) 20 | assert pkgs == set(['foo', 'bar', 'baz', 'ham']) 21 | 22 | def testEmptyMatchListAuto(self, command): 23 | pkgs = command.get_packages([], auto_checkout=True) 24 | assert pkgs == set(['foo', 'ham']) 25 | 26 | def testSingleArgMatchingOne(self, command): 27 | pkgs = command.get_packages(['ha']) 28 | assert pkgs == set(['ham']) 29 | 30 | def testSingleArgMatchingMultiple(self, command): 31 | pkgs = command.get_packages(['ba']) 32 | assert pkgs == set(['bar', 'baz']) 33 | 34 | def testArgsMatchingOne(self, command): 35 | pkgs = command.get_packages(['ha', 'zap']) 36 | assert pkgs == set(['ham']) 37 | 38 | def testArgsMatchingMultiple(self, command): 39 | pkgs = command.get_packages(['ba', 'zap']) 40 | assert pkgs == set(['bar', 'baz']) 41 | 42 | def 
testArgsMatchingMultiple2(self, command): 43 | pkgs = command.get_packages(['ha', 'ba']) 44 | assert pkgs == set(['bar', 'baz', 'ham']) 45 | 46 | def testSingleArgMatchingOneAuto(self, command): 47 | pkgs = command.get_packages(['ha'], auto_checkout=True) 48 | assert pkgs == set(['ham']) 49 | 50 | def testSingleArgMatchingMultipleAuto(self, command): 51 | pytest.raises( 52 | SystemExit, 53 | command.get_packages, ['ba'], auto_checkout=True) 54 | 55 | def testArgsMatchingOneAuto(self, command): 56 | pkgs = command.get_packages(['ha', 'zap'], auto_checkout=True) 57 | assert pkgs == set(['ham']) 58 | 59 | def testArgsMatchingMultipleAuto(self, command): 60 | pytest.raises( 61 | SystemExit, 62 | command.get_packages, ['ba', 'zap'], auto_checkout=True) 63 | 64 | def testArgsMatchingMultiple2Auto(self, command): 65 | pkgs = command.get_packages(['ha', 'ba'], auto_checkout=True) 66 | assert pkgs == set(['ham']) 67 | 68 | 69 | class TestDeactivateCommand: 70 | @pytest.fixture 71 | def develop(self, develop): 72 | develop.sources = dict( 73 | foo=MockSource(), 74 | bar=MockSource(), 75 | baz=MockSource(), 76 | ham=MockSource()) 77 | develop.auto_checkout = set(['foo', 'ham']) 78 | develop.config.develop['foo'] = 'auto' 79 | develop.config.develop['ham'] = 'auto' 80 | return develop 81 | 82 | @pytest.fixture 83 | def cmd(self, develop): 84 | from mr.developer.commands import CmdDeactivate 85 | return CmdDeactivate(develop) 86 | 87 | def testDeactivateDeactivatedPackage(self, cmd, develop): 88 | develop.config.develop['bar'] = False 89 | args = develop.parser.parse_args(args=['deactivate', 'bar']) 90 | _logger = patch('mr.developer.develop.logger') 91 | logger = _logger.__enter__() 92 | try: 93 | cmd(args) 94 | finally: 95 | _logger.__exit__(None, None, None) 96 | assert develop.config.develop == dict( 97 | bar=False, 98 | foo='auto', 99 | ham='auto') 100 | assert logger.mock_calls == [] 101 | 102 | def testDeactivateActivatedPackage(self, cmd, develop): 103 | develop.config.develop['bar'] = True 104 | args = develop.parser.parse_args(args=['deactivate', 'bar']) 105 | _logger = patch('mr.developer.commands.logger') 106 | logger = _logger.__enter__() 107 | try: 108 | cmd(args) 109 | finally: 110 | _logger.__exit__(None, None, None) 111 | assert develop.config.develop == dict( 112 | bar=False, 113 | foo='auto', 114 | ham='auto') 115 | assert logger.mock_calls == [ 116 | ('info', ("Deactivated 'bar'.",), {}), 117 | ('warn', ("Don't forget to run buildout again, so the deactived packages are actually not used anymore.",), {})] 118 | 119 | def testDeactivateAutoCheckoutPackage(self, cmd, develop): 120 | args = develop.parser.parse_args(args=['deactivate', 'foo']) 121 | _logger = patch('mr.developer.commands.logger') 122 | logger = _logger.__enter__() 123 | try: 124 | cmd(args) 125 | finally: 126 | _logger.__exit__(None, None, None) 127 | assert develop.config.develop == dict( 128 | foo=False, 129 | ham='auto') 130 | assert logger.mock_calls == [ 131 | ('info', ("Deactivated 'foo'.",), {}), 132 | ('warn', ("Don't forget to run buildout again, so the deactived packages are actually not used anymore.",), {})] 133 | 134 | 135 | class TestHelpCommand: 136 | @pytest.fixture 137 | def cmd(self, develop): 138 | from mr.developer.commands import CmdHelp 139 | return CmdHelp(develop) 140 | 141 | def testHelp(self, cmd, develop, capsys): 142 | args = develop.parser.parse_args(args=['help']) 143 | cmd(args) 144 | out, err = capsys.readouterr() 145 | assert 'Available commands' in out 146 | 
-------------------------------------------------------------------------------- /src/mr/developer/tests/utils.py: -------------------------------------------------------------------------------- 1 | from subprocess import Popen, PIPE 2 | from mr.developer.compat import s 3 | import os 4 | import sys 5 | import threading 6 | 7 | 8 | def tee(process, filter_func): 9 | """Read lines from process.stdout and echo them to sys.stdout. 10 | 11 | Returns a list of lines read. Lines are not newline terminated. 12 | 13 | The 'filter_func' is a callable which is invoked for every line, 14 | receiving the line as its argument. If the filter_func returns True, the 15 | line is echoed to sys.stdout. 16 | """ 17 | # We simply use readline here; fancier IPC is not warranted 18 | # in the context of this package. 19 | lines = [] 20 | while True: 21 | line = process.stdout.readline() 22 | if line: 23 | stripped_line = line.rstrip() 24 | if filter_func(stripped_line): 25 | sys.stdout.write(s(line)) 26 | lines.append(stripped_line) 27 | elif process.poll() is not None: 28 | break 29 | return lines 30 | 31 | 32 | def tee2(process, filter_func): 33 | """Read lines from process.stderr and echo them to sys.stderr. 34 | 35 | The 'filter_func' is a callable which is invoked for every line, 36 | receiving the line as its argument. If the filter_func returns True, the 37 | line is echoed to sys.stderr. 38 | """ 39 | while True: 40 | line = process.stderr.readline() 41 | if line: 42 | stripped_line = line.rstrip() 43 | if filter_func(stripped_line): 44 | sys.stderr.write(s(line)) 45 | elif process.poll() is not None: 46 | break 47 | 48 | 49 | class background_thread(object): 50 | """Context manager to start and stop a background thread.""" 51 | 52 | def __init__(self, target, args): 53 | self.target = target 54 | self.args = args 55 | 56 | def __enter__(self): 57 | self._t = threading.Thread(target=self.target, args=self.args) 58 | self._t.start() 59 | return self._t 60 | 61 | def __exit__(self, *ignored): 62 | self._t.join() 63 | 64 | 65 | def popen(cmd, echo=True, echo2=True, env=None, cwd=None): 66 | """Run 'cmd' and return a two-tuple of exit code and lines read. 67 | 68 | If 'echo' is True, the stdout stream is echoed to sys.stdout. 69 | If 'echo2' is True, the stderr stream is echoed to sys.stderr. 70 | 71 | The 'echo' and 'echo2' arguments may also be callables, in which 72 | case they are used as tee filters. 73 | 74 | The 'env' argument allows passing a dict replacing os.environ. 75 | 76 | If 'cwd' is not None, the current directory will be changed to cwd before execution.
77 | """ 78 | if not callable(echo): 79 | if echo: 80 | echo = On() 81 | else: 82 | echo = Off() 83 | 84 | if not callable(echo2): 85 | if echo2: 86 | echo2 = On() 87 | else: 88 | echo2 = Off() 89 | 90 | process = Popen( 91 | cmd, 92 | shell=True, 93 | stdout=PIPE, 94 | stderr=PIPE, 95 | env=env, 96 | cwd=cwd 97 | ) 98 | 99 | bt = background_thread(tee2, (process, echo2)) 100 | bt.__enter__() 101 | try: 102 | lines = tee(process, echo) 103 | finally: 104 | bt.__exit__(None, None, None) 105 | return process.returncode, lines 106 | 107 | 108 | class On(object): 109 | """A tee filter printing all lines.""" 110 | 111 | def __call__(self, line): 112 | return True 113 | 114 | 115 | class Off(object): 116 | """A tee filter suppressing all lines.""" 117 | 118 | def __call__(self, line): 119 | return False 120 | 121 | 122 | class Process(object): 123 | """Process related functions using the tee module.""" 124 | 125 | def __init__(self, quiet=False, env=None, cwd=None): 126 | self.quiet = quiet 127 | self.env = env 128 | self.cwd = cwd 129 | 130 | def popen(self, cmd, echo=True, echo2=True, cwd=None): 131 | # env *replaces* os.environ 132 | if self.quiet: 133 | echo = echo2 = False 134 | return popen(cmd, echo, echo2, env=self.env, cwd=self.cwd or cwd) 135 | 136 | def check_call(self, cmd, **kw): 137 | rc, lines = self.popen(cmd, **kw) 138 | assert rc == 0 139 | return lines 140 | 141 | 142 | class MockConfig(object): 143 | def __init__(self): 144 | self.buildout_args = [] 145 | self.develop = {} 146 | self.rewrites = [] 147 | 148 | def save(self): 149 | pass 150 | 151 | 152 | class MockDevelop(object): 153 | def __init__(self): 154 | from mr.developer.develop import ArgumentParser 155 | self.always_accept_server_certificate = True 156 | self.always_checkout = False 157 | self.auto_checkout = '' 158 | self.update_git_submodules = 'always' 159 | self.develeggs = '' 160 | self.config = MockConfig() 161 | self.parser = ArgumentParser() 162 | self.parsers = self.parser.add_subparsers(title="commands", metavar="") 163 | self.threads = 1 164 | 165 | 166 | class GitRepo(object): 167 | def __init__(self, base): 168 | self.base = base 169 | self.url = 'file:///%s' % self.base 170 | self.process = Process(cwd=self.base) 171 | 172 | def __call__(self, cmd, **kw): 173 | return self.process.check_call(cmd, **kw) 174 | 175 | def init(self): 176 | os.mkdir(self.base) 177 | self("git init") 178 | 179 | def setup_user(self): 180 | self('git config user.email "florian.schulze@gmx.net"') 181 | self('git config user.name "Florian Schulze"') 182 | 183 | def add_file(self, fname, msg=None): 184 | repo_file = self.base[fname] 185 | repo_file.create_file(fname) 186 | self("git add %s" % repo_file, echo=False) 187 | if msg is None: 188 | msg = fname 189 | self("git commit %s -m %s" % (repo_file, msg), echo=False) 190 | 191 | def add_submodule(self, submodule, submodule_name): 192 | assert isinstance(submodule, GitRepo) 193 | self("git -c protocol.file.allow=always submodule add %s %s" % (submodule.url, submodule_name)) 194 | self("git add .gitmodules") 195 | self("git add %s" % submodule_name) 196 | self("git commit -m 'Add submodule %s'" % submodule_name) 197 | 198 | def add_branch(self, bname, msg=None): 199 | self("git checkout -b %s" % bname) 200 | -------------------------------------------------------------------------------- /src/mr/developer/cvs.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | import os 3 | import re 4 | import subprocess 5 
| 6 | logger = common.logger 7 | 8 | RE_ROOT = re.compile(r'(:pserver:)([a-zA-Z0-9]*)(@.*)') 9 | 10 | 11 | class CVSError(common.WCError): 12 | pass 13 | 14 | 15 | def build_cvs_command(command, name, url, tag='', cvs_root='', tag_file=None): 16 | """ 17 | Create CVS commands. 18 | 19 | Examples:: 20 | 21 | >>> build_cvs_command('checkout', 'package.name', 'python/package.name') 22 | ['cvs', 'checkout', '-P', '-f', '-d', 'package.name', 'python/package.name'] 23 | >>> build_cvs_command('update', 'package.name', 'python/package.name') 24 | ['cvs', 'update', '-P', '-f', '-d'] 25 | >>> build_cvs_command('checkout', 'package.name', 'python/package.name', tag='package_name_0-1-0') 26 | ['cvs', 'checkout', '-P', '-r', 'package_name_0-1-0', '-d', 'package.name', 'python/package.name'] 27 | >>> build_cvs_command('update', 'package.name', 'python/package.name', tag='package_name_0-1-0') 28 | ['cvs', 'update', '-P', '-r', 'package_name_0-1-0', '-d'] 29 | >>> build_cvs_command('checkout', 'package.name', 'python/package.name', cvs_root=':pserver:user@127.0.0.1:/repos') 30 | ['cvs', '-d', ':pserver:user@127.0.0.1:/repos', 'checkout', '-P', '-f', '-d', 'package.name', 'python/package.name'] 31 | >>> build_cvs_command('status', 'package.name', 'python/package.name') 32 | ['cvs', '-q', '-n', 'update'] 33 | >>> build_cvs_command('tags', 'package.name', 'python/package.name', tag_file='setup.py') 34 | ['cvs', '-Q', 'log', 'setup.py'] 35 | 36 | """ 37 | if command == 'status': 38 | return ['cvs', '-q', '-n', 'update'] 39 | 40 | cmd = [common.which('cvs', default='cvs')] 41 | if cvs_root: 42 | cmd.extend(['-d', cvs_root]) 43 | 44 | if command == 'tags': 45 | cmd.extend(['-Q', 'log']) 46 | if not tag_file: 47 | tag_file = 'setup.py' 48 | cmd.append(tag_file) 49 | else: 50 | cmd.extend([command, '-P']) 51 | if tag: 52 | cmd.extend(['-r', tag]) 53 | else: 54 | cmd.append('-f') 55 | cmd.append('-d') 56 | if command == 'checkout': 57 | cmd.extend([name, url]) 58 | return cmd 59 | 60 | 61 | class CVSWorkingCopy(common.BaseWorkingCopy): 62 | 63 | def __init__(self, source): 64 | super(CVSWorkingCopy, self).__init__(source) 65 | if self.source.get('newest_tag', '').lower() in ['1', 'true', 'yes']: 66 | self.source['tag'] = self._get_newest_tag() 67 | 68 | def cvs_command(self, command, **kwargs): 69 | name = self.source['name'] 70 | path = self.source['path'] 71 | url = self.source['url'] 72 | tag = self.source.get('tag') 73 | 74 | cvs_root = self.source.get('cvs_root') 75 | tag_file = self.source.get('tag_file') 76 | self.output((logger.info, 'Running %s %r from CVS.' 
% (command, name))) 77 | cmd = build_cvs_command(command, name, url, tag, cvs_root, tag_file) 78 | 79 | # because CVS cannot work on absolute paths, we must execute cvs commands 80 | # in the destination or in its parent directory 81 | old_cwd = os.getcwd() 82 | if command == 'checkout': 83 | path = os.path.dirname(path) 84 | os.chdir(path) 85 | 86 | try: 87 | cmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 88 | stdout, stderr = cmd.communicate() 89 | finally: 90 | os.chdir(old_cwd) 91 | 92 | if cmd.returncode != 0: 93 | raise CVSError('CVS %s for %r failed.\n%s' % (command, name, stderr)) 94 | if command == 'tags': 95 | return self._format_tags_list(stdout) 96 | if kwargs.get('verbose', False): 97 | return stdout 98 | 99 | def checkout(self, **kwargs): 100 | name = self.source['name'] 101 | path = self.source['path'] 102 | update = self.should_update(**kwargs) 103 | if os.path.exists(path): 104 | if update: 105 | self.update(**kwargs) 106 | elif self.matches(): 107 | self.output((logger.info, 'Skipped checkout of existing package %r.' % name)) 108 | else: 109 | raise CVSError( 110 | 'Source URL for existing package %r differs. ' 111 | 'Expected %r.' % (name, self.source['url'])) 112 | else: 113 | return self.cvs_command('checkout', **kwargs) 114 | 115 | def matches(self): 116 | def normalize_root(text): 117 | """ 118 | Removes username from CVS Root path. 119 | """ 120 | return RE_ROOT.sub(r'\1\3', text) 121 | 122 | path = self.source['path'] 123 | 124 | repo_file = os.path.join(path, 'CVS', 'Repository') 125 | if not os.path.exists(repo_file): 126 | raise CVSError('Cannot find CVS/Repository file in %s.' % path) 127 | repo = open(repo_file).read().strip() 128 | 129 | cvs_root = self.source.get('cvs_root') 130 | if cvs_root: 131 | root_file = os.path.join(path, 'CVS', 'Root') 132 | root = open(root_file).read().strip() 133 | if normalize_root(cvs_root) != normalize_root(root): 134 | return False 135 | 136 | return (self.source['url'] == repo) 137 | 138 | def status(self, **kwargs): 139 | path = self.source['path'] 140 | 141 | # a package that has not been checked out yet is clean 142 | if not os.path.exists(path): 143 | return 'clean' 144 | 145 | status = 'clean' 146 | stdout = self.cvs_command('status', verbose=True) 147 | for line in stdout.split('\n'): 148 | if not line or line.endswith('.egg-info'): 149 | continue 150 | if line[0] == 'C': 151 | # there is a file with a conflict 152 | status = 'conflict' 153 | break 154 | if line[0] in ('M', '?', 'A', 'R'): 155 | # some files are locally modified 156 | status = 'modified' 157 | 158 | if kwargs.get('verbose', False): 159 | return status, stdout 160 | else: 161 | return status 162 | 163 | def update(self, **kwargs): 164 | name = self.source['name'] 165 | if not self.matches(): 166 | raise CVSError( 167 | "Can't update package %r, because its URL doesn't match." % 168 | name) 169 | if self.status() != 'clean' and not kwargs.get('force', False): 170 | raise CVSError( 171 | "Can't update package %r, because it's dirty."
% name) 172 | return self.cvs_command('update', **kwargs) 173 | 174 | def _format_tags_list(self, stdout): 175 | output = [] 176 | tag_line_re = re.compile(r'([^: ]+): [0-9.]+') 177 | list_started = False 178 | for line in stdout.split('\n'): 179 | if list_started: 180 | matched = tag_line_re.match(line.strip()) 181 | if matched: 182 | output.append(matched.groups()[0]) 183 | else: 184 | list_started = False 185 | elif 'symbolic names:' in line: 186 | list_started = True 187 | return list(set(output)) 188 | 189 | def _get_newest_tag(self): 190 | try: 191 | tags = self.cvs_command('tags') 192 | except OSError: 193 | return None 194 | mask = self.source.get('newest_tag_prefix', self.source.get('newest_tag_mask', '')) 195 | if mask: 196 | tags = [t for t in tags if t.startswith(mask)] 197 | tags = common.version_sorted(tags, reverse=True) 198 | if not tags: 199 | return None 200 | newest_tag = tags[0] 201 | self.output((logger.info, 'Picked newest tag for %r from CVS: %r.' % (self.source['name'], newest_tag))) 202 | return newest_tag 203 | -------------------------------------------------------------------------------- /src/mr/developer/mercurial.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | from mr.developer.compat import b 3 | import re 4 | import os 5 | import subprocess 6 | 7 | logger = common.logger 8 | 9 | 10 | class MercurialError(common.WCError): 11 | pass 12 | 13 | 14 | class MercurialWorkingCopy(common.BaseWorkingCopy): 15 | 16 | def __init__(self, source): 17 | self.hg_executable = common.which('hg') 18 | source.setdefault('branch', 'default') 19 | source.setdefault('rev') 20 | super(MercurialWorkingCopy, self).__init__(source) 21 | 22 | def hg_clone(self, **kwargs): 23 | name = self.source['name'] 24 | path = self.source['path'] 25 | url = self.source['url'] 26 | 27 | if os.path.exists(path): 28 | self.output((logger.info, 'Skipped cloning of existing package %r.' % name)) 29 | return 30 | rev = self.get_rev() 31 | self.output((logger.info, 'Cloned %r with mercurial.' % name)) 32 | env = dict(os.environ) 33 | env.pop('PYTHONPATH', None) 34 | cmd = subprocess.Popen( 35 | [self.hg_executable, 'clone', '--updaterev', rev, '--quiet', '--noninteractive', url, path], 36 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 37 | stdout, stderr = cmd.communicate() 38 | if cmd.returncode != 0: 39 | raise MercurialError( 40 | 'hg clone for %r failed.\n%s' % (name, stderr)) 41 | if kwargs.get('verbose', False): 42 | return stdout 43 | 44 | def get_rev(self): 45 | branch = self.source['branch'] 46 | rev = self.source['rev'] 47 | 48 | if branch != 'default': 49 | if rev: 50 | raise ValueError("'branch' and 'rev' parameters cannot be used simultaneously") 51 | else: 52 | rev = branch 53 | else: 54 | rev = rev or 'default' 55 | 56 | if self.source.get('newest_tag', '').lower() in ['1', 'true', 'yes']: 57 | rev = self._get_newest_tag() or rev 58 | return rev 59 | 60 | def _update_to_rev(self, rev): 61 | path = self.source['path'] 62 | name = self.source['name'] 63 | env = dict(os.environ) 64 | env.pop('PYTHONPATH', None) 65 | cmd = subprocess.Popen( 66 | [self.hg_executable, 'checkout', rev, '-c'], 67 | cwd=path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 68 | stdout, stderr = cmd.communicate() 69 | if cmd.returncode: 70 | raise MercurialError( 71 | 'hg update for %r failed.\n%s' % (name, stderr)) 72 | self.output((logger.info, 'Switched %r to %s.'
% (name, rev))) 73 | return stdout 74 | 75 | def _get_tags(self): 76 | path = self.source['path'] 77 | name = self.source['name'] 78 | env = dict(os.environ) 79 | env.pop('PYTHONPATH', None) 80 | try: 81 | cmd = subprocess.Popen( 82 | [self.hg_executable, 'tags'], 83 | cwd=path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 84 | except OSError: 85 | return [] 86 | stdout, stderr = cmd.communicate() 87 | if cmd.returncode: 88 | raise MercurialError( 89 | 'hg update for %r failed.\n%s' % (name, stderr)) 90 | 91 | tag_line_re = re.compile(r'([^\s]+)[\s]*.*') 92 | 93 | def get_tag_name(line): 94 | matched = tag_line_re.match(line) 95 | if matched: 96 | return matched.groups()[0] 97 | 98 | tags = (get_tag_name(line) for line in stdout.split("\n")) 99 | return [tag for tag in tags if tag and tag != 'tip'] 100 | 101 | def _get_newest_tag(self): 102 | mask = self.source.get('newest_tag_prefix', self.source.get('newest_tag_mask', '')) 103 | name = self.source['name'] 104 | tags = self._get_tags() 105 | if mask: 106 | tags = [t for t in tags if t.startswith(mask)] 107 | tags = common.version_sorted(tags, reverse=True) 108 | if not tags: 109 | return None 110 | newest_tag = tags[0] 111 | self.output((logger.info, 'Picked newest tag for %r from Mercurial: %r.' % (name, newest_tag))) 112 | return newest_tag 113 | 114 | def hg_pull(self, **kwargs): 115 | # NOTE: we don't include the branch here as we just want to update 116 | # to the head of whatever branch the developer is working on 117 | # However the 'rev' parameter works differently and forces revision 118 | name = self.source['name'] 119 | path = self.source['path'] 120 | self.output((logger.info, 'Updated %r with mercurial.' % name)) 121 | env = dict(os.environ) 122 | env.pop('PYTHONPATH', None) 123 | cmd = subprocess.Popen( 124 | [self.hg_executable, 'pull', '-u'], 125 | cwd=path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 126 | stdout, stderr = cmd.communicate() 127 | if cmd.returncode != 0: 128 | # hg v2.1 pull returns non-zero return code in case of 129 | # no remote changes. 130 | if 'no changes found' not in stdout: 131 | raise MercurialError( 132 | 'hg pull for %r failed.\n%s' % (name, stderr)) 133 | # to find newest_tag hg pull is needed before 134 | rev = self.get_rev() 135 | if rev: 136 | stdout += self._update_to_rev(rev) 137 | if kwargs.get('verbose', False): 138 | return stdout 139 | 140 | def checkout(self, **kwargs): 141 | name = self.source['name'] 142 | path = self.source['path'] 143 | update = self.should_update(**kwargs) 144 | if os.path.exists(path): 145 | if update: 146 | self.update(**kwargs) 147 | elif self.matches(): 148 | self.output((logger.info, 'Skipped checkout of existing package %r.' % name)) 149 | else: 150 | raise MercurialError( 151 | 'Source URL for existing package %r differs. ' 152 | 'Expected %r.' 
% (name, self.source['url'])) 153 | else: 154 | return self.hg_clone(**kwargs) 155 | 156 | def matches(self): 157 | name = self.source['name'] 158 | path = self.source['path'] 159 | env = dict(os.environ) 160 | env.pop('PYTHONPATH', None) 161 | cmd = subprocess.Popen( 162 | [self.hg_executable, 'showconfig', 'paths.default'], cwd=path, 163 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 164 | stdout, stderr = cmd.communicate() 165 | if cmd.returncode != 0: 166 | raise MercurialError( 167 | 'hg showconfig for %r failed.\n%s' % (name, stderr)) 168 | # now check that the working branch is the same 169 | return b(self.source['url'] + '\n') == stdout 170 | 171 | def status(self, **kwargs): 172 | path = self.source['path'] 173 | env = dict(os.environ) 174 | env.pop('PYTHONPATH', None) 175 | cmd = subprocess.Popen( 176 | [self.hg_executable, 'status'], cwd=path, 177 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 178 | stdout, stderr = cmd.communicate() 179 | status = stdout and 'dirty' or 'clean' 180 | if status == 'clean': 181 | cmd = subprocess.Popen( 182 | [self.hg_executable, 'outgoing'], cwd=path, 183 | env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 184 | outgoing_stdout, stderr = cmd.communicate() 185 | stdout += b('\n') + outgoing_stdout 186 | if cmd.returncode == 0: 187 | status = 'ahead' 188 | if kwargs.get('verbose', False): 189 | return status, stdout 190 | else: 191 | return status 192 | 193 | def update(self, **kwargs): 194 | name = self.source['name'] 195 | if not self.matches(): 196 | raise MercurialError( 197 | "Can't update package %r because its URL doesn't match." % 198 | name) 199 | if self.status() != 'clean' and not kwargs.get('force', False): 200 | raise MercurialError( 201 | "Can't update package %r because it's dirty." % name) 202 | return self.hg_pull(**kwargs) 203 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_git.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | import pytest 5 | from mock import patch 6 | 7 | from mr.developer.extension import Source 8 | from mr.developer.tests.utils import Process 9 | 10 | 11 | class TestGit: 12 | def createDefaultContent(self, repository): 13 | # Create default content and branches in a repository. 14 | # Return a revision number. 15 | repository.add_file('foo', msg='Initial') 16 | # create branch for testing 17 | repository("git checkout -b test", echo=False) 18 | repository.add_file('foo2') 19 | # get comitted rev 20 | lines = repository("git log", echo=False) 21 | rev = lines[0].split()[1] 22 | # return to default branch 23 | repository("git checkout master", echo=False) 24 | repository.add_file('bar') 25 | # Return revision of one of the commits, the one that adds the 26 | # foo2 file. 
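        # Resulting fixture layout (summary added for clarity):
        #   master branch: foo, bar
        #   test branch:   foo, foo2   (rev points at the commit adding foo2)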
27 | return rev 28 | 29 | def testUpdateWithRevisionPin(self, develop, mkgitrepo, src): 30 | from mr.developer.commands import CmdCheckout 31 | from mr.developer.commands import CmdUpdate 32 | from mr.developer.commands import CmdStatus 33 | repository = mkgitrepo('repository') 34 | rev = self.createDefaultContent(repository) 35 | 36 | # check rev 37 | develop.sources = { 38 | 'egg': Source( 39 | kind='git', 40 | name='egg', 41 | rev=rev, 42 | url='%s' % repository.base, 43 | path=src['egg'])} 44 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 45 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 46 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 47 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 48 | 49 | shutil.rmtree(src['egg']) 50 | 51 | # check branch 52 | develop.sources = { 53 | 'egg': Source( 54 | kind='git', 55 | name='egg', 56 | branch='test', 57 | url='%s' % repository.base, 58 | path=src['egg'])} 59 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 60 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 61 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 62 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 63 | CmdStatus(develop)(develop.parser.parse_args(['status'])) 64 | 65 | # switch implicitly to master branch 66 | develop.sources = { 67 | 'egg': Source( 68 | kind='git', 69 | name='egg', 70 | url='%s' % repository.base, 71 | path=src['egg'])} 72 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 73 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 74 | 75 | # Switch to specific revision, then switch back to master branch. 76 | develop.sources = { 77 | 'egg': Source( 78 | kind='git', 79 | name='egg', 80 | rev=rev, 81 | url='%s' % repository.base, 82 | path=src['egg'])} 83 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 84 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 85 | develop.sources = { 86 | 'egg': Source( 87 | kind='git', 88 | name='egg', 89 | url='%s' % repository.base, 90 | path=src['egg'])} 91 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 92 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 93 | 94 | CmdStatus(develop)(develop.parser.parse_args(['status'])) 95 | 96 | # we can't use both rev and branch 97 | with pytest.raises(SystemExit): 98 | develop.sources = { 99 | 'egg': Source( 100 | kind='git', 101 | name='egg', 102 | branch='test', 103 | rev=rev, 104 | url='%s' % repository.base, 105 | path=src['egg-failed'])} 106 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 107 | 108 | def testUpdateWithoutRevisionPin(self, develop, mkgitrepo, src, capsys): 109 | from mr.developer.commands import CmdCheckout 110 | from mr.developer.commands import CmdUpdate 111 | from mr.developer.commands import CmdStatus 112 | repository = mkgitrepo('repository') 113 | repository.add_file('foo') 114 | repository.add_file('bar') 115 | repository.add_branch('develop') 116 | develop.sources = { 117 | 'egg': Source( 118 | kind='git', 119 | name='egg', 120 | url=repository.url, 121 | path=src['egg'])} 122 | _log = patch('mr.developer.git.logger') 123 | log = _log.__enter__() 124 | try: 125 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 126 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 127 | captured = capsys.readouterr() 128 | assert captured.out.startswith("Initialized empty Git repository in") 129 | 
CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 130 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 131 | assert log.method_calls == [ 132 | ('info', ("Cloned 'egg' with git from '%s'." % repository.url,), {}), 133 | ('info', ("Updated 'egg' with git.",), {}), 134 | ('info', ("Switching to remote branch 'remotes/origin/master'.",), {})] 135 | captured = capsys.readouterr() 136 | assert captured.out == "" 137 | CmdStatus(develop)(develop.parser.parse_args(['status', '-v'])) 138 | captured = capsys.readouterr() 139 | assert captured.out == "~ A egg\n ## master...origin/master\n\n" 140 | 141 | finally: 142 | _log.__exit__(None, None, None) 143 | 144 | def testUpdateVerbose(self, develop, mkgitrepo, src, capsys): 145 | from mr.developer.commands import CmdCheckout 146 | from mr.developer.commands import CmdUpdate 147 | from mr.developer.commands import CmdStatus 148 | repository = mkgitrepo('repository') 149 | repository.add_file('foo') 150 | repository.add_file('bar') 151 | repository.add_branch('develop') 152 | develop.sources = { 153 | 'egg': Source( 154 | kind='git', 155 | name='egg', 156 | url=repository.url, 157 | path=src['egg'])} 158 | _log = patch('mr.developer.git.logger') 159 | log = _log.__enter__() 160 | try: 161 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg', '-v'])) 162 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 163 | captured = capsys.readouterr() 164 | assert captured.out.startswith("Initialized empty Git repository in") 165 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg', '-v'])) 166 | assert set(os.listdir(src['egg'])) == set(('.git', 'bar', 'foo')) 167 | assert log.method_calls == [ 168 | ('info', ("Cloned 'egg' with git from '%s'." % repository.url,), {}), 169 | ('info', ("Updated 'egg' with git.",), {}), 170 | ('info', ("Switching to remote branch 'remotes/origin/master'.",), {})] 171 | captured = capsys.readouterr() 172 | assert "set up to track" in captured.out 173 | CmdStatus(develop)(develop.parser.parse_args(['status', '-v'])) 174 | captured = capsys.readouterr() 175 | assert captured.out == "~ A egg\n ## master...origin/master\n\n" 176 | 177 | finally: 178 | _log.__exit__(None, None, None) 179 | 180 | def testDepthOption(self, mkgitrepo, src, tempdir): 181 | from mr.developer.develop import develop 182 | 183 | # create repository and make two commits on it 184 | repository = mkgitrepo('repository') 185 | self.createDefaultContent(repository) 186 | 187 | tempdir['buildout.cfg'].create_file( 188 | '[buildout]', 189 | 'mr.developer-threads = 1', 190 | '[sources]', 191 | 'egg = git %s' % repository.url) 192 | tempdir['.mr.developer.cfg'].create_file() 193 | # os.chdir(self.tempdir) 194 | develop('co', 'egg') 195 | 196 | # check that there are two commits in history 197 | egg_process = Process(cwd=src['egg']) 198 | lines = egg_process.check_call("git log", echo=False) 199 | commits = [msg for msg in lines 200 | if msg.decode('utf-8').startswith('commit')] 201 | assert len(commits) == 2 202 | 203 | shutil.rmtree(src['egg']) 204 | 205 | tempdir['buildout.cfg'].create_file( 206 | '[buildout]', 207 | 'mr.developer-threads = 1', 208 | '[sources]', 209 | 'egg = git %s depth=1' % repository.url) 210 | develop('co', 'egg') 211 | 212 | # check that there is only one commit in history 213 | lines = egg_process.check_call("git log", echo=False) 214 | commits = [msg for msg in lines 215 | if msg.decode('utf-8').startswith('commit')] 216 | assert len(commits) == 1 217 | 218 | 
shutil.rmtree(src['egg']) 219 | 220 | tempdir['buildout.cfg'].create_file( 221 | '[buildout]', 222 | 'mr.developer-threads = 1', 223 | 'git-clone-depth = 1', 224 | '[sources]', 225 | 'egg = git %s' % repository.url) 226 | develop('co', 'egg') 227 | 228 | # check that there is only one commit in history 229 | lines = egg_process.check_call("git log", echo=False) 230 | commits = [msg for msg in lines 231 | if msg.decode('utf-8').startswith('commit')] 232 | assert len(commits) == 1 233 | 234 | # You should be able to combine depth and cloning a branch. 235 | # Otherwise with a depth of 1 you could clone the master 236 | # branch and then not be able to switch to the wanted branch, 237 | # because this branch would not be there: the revision that it 238 | # points to is not in the downloaded history. 239 | shutil.rmtree(src['egg']) 240 | tempdir['buildout.cfg'].create_file( 241 | '[buildout]', 242 | 'mr.developer-threads = 1', 243 | 'git-clone-depth = 1', 244 | '[sources]', 245 | 'egg = git %s branch=test' % repository.url) 246 | develop('co', 'egg') 247 | 248 | # check that there is only one commit in history 249 | lines = egg_process.check_call("git log", echo=False) 250 | commits = [msg for msg in lines 251 | if msg.decode('utf-8').startswith('commit')] 252 | assert len(commits) == 1 253 | 254 | # Check that the expected files from the branch are there 255 | assert set(os.listdir(src['egg'])) == set(('.git', 'foo', 'foo2')) 256 | -------------------------------------------------------------------------------- /HELP.rst: -------------------------------------------------------------------------------- 1 | Commands 2 | ======== 3 | 4 | The following is a list of all commands and their options. 5 | 6 | activate (a) 7 | ------------ 8 | 9 | :: 10 | 11 | usage: develop activate [-h] [-a] [-c] [-d] 12 | package-regexp [package-regexp ...] 13 | 14 | Add packages to the list of development packages. 15 | 16 | positional arguments: 17 | package-regexp A regular expression to match package names. 18 | 19 | optional arguments: 20 | -h, --help show this help message and exit 21 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 22 | you don't specify a then all declared 23 | packages are processed. 24 | -c, --checked-out Only considers packages currently checked out. If you 25 | don't specify a then all checked out 26 | packages are processed. 27 | -d, --develop Only considers packages currently in development mode. 28 | If you don't specify a then all 29 | develop packages are processed. 30 | 31 | 32 | arguments (args) 33 | ---------------- 34 | 35 | :: 36 | 37 | usage: develop arguments [-h] 38 | 39 | Print arguments used by last buildout which will be used with the 'rebuild' 40 | command. 41 | 42 | optional arguments: 43 | -h, --help show this help message and exit 44 | 45 | 46 | checkout (co) 47 | ------------- 48 | 49 | :: 50 | 51 | usage: develop checkout [-h] [-a] [-v] package-regexp [package-regexp ...] 52 | 53 | Make a checkout of the packages matching the regular expressions and add them 54 | to the list of development packages. 55 | 56 | positional arguments: 57 | package-regexp A regular expression to match package names. 58 | 59 | optional arguments: 60 | -h, --help show this help message and exit 61 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 62 | you don't specify a then all declared 63 | packages are processed. 64 | -v, --verbose Show output of VCS command. 
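
For example, assuming the ``develop`` script generated by mr.developer in the
buildout's ``bin`` directory, packages matching a regular expression can be
checked out like this::

    ./bin/develop checkout 'mr\..*'
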
65 | 66 | 67 | deactivate (d) 68 | -------------- 69 | 70 | :: 71 | 72 | usage: develop deactivate [-h] [-a] [-c] [-d] 73 | package-regexp [package-regexp ...] 74 | 75 | Remove packages from the list of development packages. 76 | 77 | positional arguments: 78 | package-regexp A regular expression to match package names. 79 | 80 | optional arguments: 81 | -h, --help show this help message and exit 82 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 83 | you don't specify a then all declared 84 | packages are processed. 85 | -c, --checked-out Only considers packages currently checked out. If you 86 | don't specify a then all checked out 87 | packages are processed. 88 | -d, --develop Only considers packages currently in development mode. 89 | If you don't specify a then all 90 | develop packages are processed. 91 | 92 | 93 | help (h) 94 | -------- 95 | 96 | :: 97 | 98 | usage: develop help [-h] [--rst] [-z] [command] 99 | 100 | Show help on the given command or about the whole script if none given. 101 | 102 | positional arguments: 103 | command The command you want to see the help of. 104 | 105 | optional arguments: 106 | -h, --help show this help message and exit 107 | --rst Print help for all commands in reStructuredText format. 108 | -z, --zsh Print info for zsh autocompletion 109 | 110 | 111 | info 112 | ---- 113 | 114 | :: 115 | 116 | usage: develop info [-h] [-a] [-c] [-d] [--name] [-p] [--type] [--url] 117 | [package-regexp [package-regexp ...]] 118 | 119 | Lists informations about packages. 120 | 121 | positional arguments: 122 | package-regexp A regular expression to match package names. 123 | 124 | optional arguments: 125 | -h, --help show this help message and exit 126 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 127 | you don't specify a then all declared 128 | packages are processed. 129 | -c, --checked-out Only considers packages currently checked out. If you 130 | don't specify a then all declared 131 | packages are processed. 132 | -d, --develop Only considers packages currently in development mode. 133 | If you don't specify a then all 134 | declared packages are processed. 135 | 136 | Output options: 137 | The following options are used to print just the info you want, the order 138 | they are specified reflects the order in which the information will be 139 | printed. 140 | 141 | --name Prints the name of the package. 142 | -p, --path Prints the absolute path of the package. 143 | --type Prints the repository type of the package. 144 | --url Prints the URL of the package. 145 | 146 | 147 | list (ls) 148 | --------- 149 | 150 | :: 151 | 152 | usage: develop list [-h] [-a] [-c] [-d] [-l] [-s] 153 | [package-regexp [package-regexp ...]] 154 | 155 | Lists tracked packages. 156 | 157 | positional arguments: 158 | package-regexp A regular expression to match package names. 159 | 160 | optional arguments: 161 | -h, --help show this help message and exit 162 | -a, --auto-checkout Only show packages in auto-checkout list. 163 | -c, --checked-out Only considers packages currently checked out. If you 164 | don't specify a then all checked out 165 | packages are processed. 166 | -d, --develop Only considers packages currently in development mode. 167 | If you don't specify a then all 168 | develop packages are processed. 169 | -l, --long Show URL and kind of package. 170 | -s, --status Show checkout status. 
171 | The first column in the output shows the checkout 172 | status: 173 | '#' available for checkout 174 | ' ' in auto-checkout list and checked out 175 | '~' not in auto-checkout list, but checked out 176 | '!' in auto-checkout list, but not checked out 177 | 'C' the repository URL doesn't match 178 | 179 | 180 | purge 181 | ----- 182 | 183 | :: 184 | 185 | usage: develop purge [-h] [-n] [-f] [package-regexp [package-regexp ...]] 186 | 187 | Remove checked out packages which aren't active anymore. 188 | 189 | Only 'svn' packages can be purged, because other repositories may contain 190 | unrecoverable files even when not marked as 'dirty'. 191 | 192 | positional arguments: 193 | package-regexp A regular expression to match package names. 194 | 195 | optional arguments: 196 | -h, --help show this help message and exit 197 | -n, --dry-run Don't actually remove anything, just print the paths which 198 | would be removed. 199 | -f, --force Force purge even if the working copy is dirty or unknown 200 | (non-svn). 201 | 202 | 203 | rebuild (rb) 204 | ------------ 205 | 206 | :: 207 | 208 | usage: develop rebuild [-h] [-n] 209 | 210 | Run buildout with the last used arguments. 211 | 212 | optional arguments: 213 | -h, --help show this help message and exit 214 | -n, --dry-run DEPRECATED: Use 'arguments' command instead. Don't actually 215 | run buildout, just show the last used arguments. 216 | 217 | 218 | reset 219 | ----- 220 | 221 | :: 222 | 223 | usage: develop reset [-h] [-a] [-c] [-d] [package-regexp [package-regexp ...]] 224 | 225 | Resets the packages develop status. This is useful when switching to a new 226 | buildout configuration. 227 | 228 | positional arguments: 229 | package-regexp A regular expression to match package names. 230 | 231 | optional arguments: 232 | -h, --help show this help message and exit 233 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 234 | you don't specify a then all declared 235 | packages are processed. 236 | -c, --checked-out Only considers packages currently checked out. If you 237 | don't specify a then all checked out 238 | packages are processed. 239 | -d, --develop Only considers packages currently in development mode. 240 | If you don't specify a then all 241 | develop packages are processed. 242 | 243 | 244 | status (stat, st) 245 | ----------------- 246 | 247 | :: 248 | 249 | usage: develop status [-h] [-a] [-c] [-d] [-v] 250 | [package-regexp [package-regexp ...]] 251 | 252 | Shows the status of tracked packages, filtered if is given. 253 | The first column in the output shows the checkout status: 254 | ' ' in auto-checkout list 255 | '~' not in auto-checkout list 256 | '!' in auto-checkout list, but not checked out 257 | 'C' the repository URL doesn't match 258 | '?' unknown package (only reported when package-regexp is not specified) 259 | The second column shows the working copy status: 260 | ' ' no changes 261 | 'M' local modifications or untracked files 262 | '>' your local branch is ahead of the remote one 263 | The third column shows the development status: 264 | ' ' activated 265 | '-' deactivated 266 | '!' deactivated, but the package is in the auto-checkout list 267 | 'A' activated, but not in list of development packages (run buildout) 268 | 'D' deactivated, but still in list of development packages (run buildout) 269 | 270 | positional arguments: 271 | package-regexp A regular expression to match package names. 
272 | 273 | optional arguments: 274 | -h, --help show this help message and exit 275 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 276 | you don't specify a then all declared 277 | packages are processed. 278 | -c, --checked-out Only considers packages currently checked out. If you 279 | don't specify a then all checked out 280 | packages are processed. 281 | -d, --develop Only considers packages currently in development mode. 282 | If you don't specify a then all 283 | develop packages are processed. 284 | -v, --verbose Show output of VCS command. 285 | 286 | 287 | update (up) 288 | ----------- 289 | 290 | :: 291 | 292 | usage: develop update [-h] [-a] [-d] [-f] [-v] 293 | [package-regexp [package-regexp ...]] 294 | 295 | Updates all known packages currently checked out. 296 | 297 | positional arguments: 298 | package-regexp A regular expression to match package names. 299 | 300 | optional arguments: 301 | -h, --help show this help message and exit 302 | -a, --auto-checkout Only considers packages declared by auto-checkout. If 303 | you don't specify a then all declared 304 | packages are processed. 305 | -d, --develop Only considers packages currently in development mode. 306 | If you don't specify a then all 307 | develop packages are processed. 308 | -f, --force Force update even if the working copy is dirty. 309 | -v, --verbose Show output of VCS command. 310 | 311 | 312 | -------------------------------------------------------------------------------- /src/mr/developer/extension.py: -------------------------------------------------------------------------------- 1 | from mr.developer.common import memoize, WorkingCopies, Config, get_workingcopytypes 2 | import logging 3 | import os 4 | import re 5 | import sys 6 | 7 | 8 | FAKE_PART_ID = '_mr.developer' 9 | 10 | logger = logging.getLogger("mr.developer") 11 | 12 | 13 | def safe_name(name): 14 | """Convert an arbitrary string to a standard distribution name 15 | 16 | Any runs of non-alphanumeric/. characters are replaced with a single '-'. 17 | 18 | This is copied from pkg_resources.safe_name. 19 | (formerly setuptools.package_index.safe_name) 20 | """ 21 | return re.sub('[^A-Za-z0-9.]+', '-', name) 22 | 23 | 24 | class Source(dict): 25 | def exists(self): 26 | return os.path.exists(self['path']) 27 | 28 | 29 | class Extension(object): 30 | def __init__(self, buildout): 31 | self.buildout = buildout 32 | self.buildout_dir = buildout['buildout']['directory'] 33 | self.executable = sys.argv[0] 34 | 35 | @memoize 36 | def get_config(self): 37 | return Config(self.buildout_dir) 38 | 39 | def get_workingcopies(self): 40 | return WorkingCopies( 41 | self.get_sources(), 42 | threads=self.get_threads()) 43 | 44 | @memoize 45 | def get_threads(self): 46 | threads = int(self.buildout['buildout'].get( 47 | 'mr.developer-threads', 48 | self.get_config().threads)) 49 | return threads 50 | 51 | @memoize 52 | def get_mrdev_verbose(self): 53 | return self.buildout['buildout'].get('mr.developer-verbose', '').lower() == 'true' 54 | 55 | @memoize 56 | def get_sources_dir(self): 57 | sources_dir = self.buildout['buildout'].get('sources-dir', 'src') 58 | if not os.path.isabs(sources_dir): 59 | sources_dir = os.path.join(self.buildout_dir, sources_dir) 60 | if os.path.isdir(self.buildout_dir) and not os.path.isdir(sources_dir): 61 | logger.info('Creating missing sources dir %s.' 
% sources_dir) 62 | os.mkdir(sources_dir) 63 | return sources_dir 64 | 65 | @memoize 66 | def get_sources(self): 67 | from zc.buildout.buildout import MissingSection 68 | sources_dir = self.get_sources_dir() 69 | sources = {} 70 | sources_section = self.buildout['buildout'].get('sources', 'sources') 71 | try: 72 | section = self.buildout[sources_section] 73 | except MissingSection: 74 | if sys.exc_info()[1].args[0] == sources_section: 75 | section = {} 76 | else: 77 | raise 78 | workingcopytypes = get_workingcopytypes() 79 | for name in section: 80 | info = section[name].split() 81 | options = [] 82 | option_matcher = re.compile(r'[a-zA-Z0-9-]+=.*') 83 | for index, item in reversed(list(enumerate(info))): 84 | if option_matcher.match(item): 85 | del info[index] 86 | options.append(item) 87 | options.reverse() 88 | if len(info) < 2: 89 | logger.error("The source definition of '%s' needs at least the repository kind and URL." % name) 90 | sys.exit(1) 91 | kind = info[0] 92 | if kind not in workingcopytypes: 93 | logger.error("Unknown repository type '%s' for source '%s'." % (kind, name)) 94 | sys.exit(1) 95 | url = info[1] 96 | 97 | path = None 98 | if len(info) > 2: 99 | if '=' not in info[2]: 100 | logger.warning("You should use 'path=%s' to set the path." % info[2]) 101 | path = os.path.join(info[2], name) 102 | if not os.path.isabs(path): 103 | path = os.path.join(self.buildout_dir, path) 104 | options[:0] = info[3:] 105 | else: 106 | options[:0] = info[2:] 107 | 108 | if path is None: 109 | source = Source(kind=kind, name=name, url=url) 110 | else: 111 | source = Source(kind=kind, name=name, url=url, path=path) 112 | 113 | for option in options: 114 | key, value = option.split('=', 1) 115 | if not key: 116 | raise ValueError("Option with no name '%s'." % option) 117 | if key in source: 118 | raise ValueError("Key '%s' already in source info." % key) 119 | if key == 'path': 120 | value = os.path.join(value, name) 121 | if not os.path.isabs(value): 122 | value = os.path.join(self.buildout_dir, value) 123 | if key == 'full-path': 124 | if not os.path.isabs(value): 125 | value = os.path.join(self.buildout_dir, value) 126 | if key == 'egg': 127 | if value.lower() in ('true', 'yes', 'on'): 128 | value = True 129 | elif value.lower() in ('false', 'no', 'off'): 130 | value = False 131 | if key == 'depth': 132 | try: 133 | not_used = int(value) # noqa 134 | except ValueError: 135 | raise ValueError('depth value needs to be a number.') 136 | source[key] = value 137 | if 'path' not in source: 138 | if 'full-path' in source: 139 | source['path'] = source['full-path'] 140 | else: 141 | source['path'] = os.path.join(sources_dir, name) 142 | 143 | if 'depth' not in source and \ 144 | self.get_git_clone_depth(): 145 | source['depth'] = self.get_git_clone_depth() 146 | 147 | for rewrite in self.get_config().rewrites: 148 | rewrite(source) 149 | 150 | sources[name] = source 151 | 152 | return sources 153 | 154 | @memoize 155 | def get_auto_checkout(self): 156 | packages = set(self.get_sources().keys()) 157 | 158 | auto_checkout = set( 159 | self.buildout['buildout'].get('auto-checkout', '').split() 160 | ) 161 | if '*' in auto_checkout: 162 | auto_checkout = packages 163 | 164 | if not auto_checkout.issubset(packages): 165 | diff = list(sorted(auto_checkout.difference(packages))) 166 | if len(diff) > 1: 167 | pkgs = "%s and '%s'" % (", ".join("'%s'" % x for x in diff[:-1]), diff[-1]) 168 | logger.error("The packages %s from auto-checkout have no source information." 
% pkgs) 169 | else: 170 | logger.error("The package '%s' from auto-checkout has no source information." % diff[0]) 171 | sys.exit(1) 172 | 173 | return auto_checkout 174 | 175 | def get_always_checkout(self): 176 | return self.buildout['buildout'].get('always-checkout', False) 177 | 178 | def get_update_git_submodules(self): 179 | return self.buildout['buildout'].get('update-git-submodules', 'always') 180 | 181 | def get_git_clone_depth(self): 182 | value = self.buildout['buildout'].get('git-clone-depth', '') 183 | if value: 184 | try: 185 | not_used = int(value) # noqa 186 | except ValueError: 187 | raise ValueError('git-clone-depth needs to be a number.') 188 | return value 189 | 190 | def get_develop_info(self): 191 | auto_checkout = self.get_auto_checkout() 192 | sources = self.get_sources() 193 | develop = self.buildout['buildout'].get('develop', '') 194 | versions_section = self.buildout['buildout'].get('versions') 195 | versions = self.buildout._raw.get(versions_section, {}) 196 | develeggs = {} 197 | develeggs_order = [] 198 | for path in develop.split(): 199 | # strip / from end of path 200 | head, tail = os.path.split(path.rstrip('/')) 201 | develeggs[tail] = path 202 | develeggs_order.append(tail) 203 | config_develop = self.get_config().develop 204 | for name in sources: 205 | source = sources[name] 206 | if source.get('egg', True) and name not in develeggs: 207 | path = sources[name]['path'] 208 | status = config_develop.get(name, name in auto_checkout) 209 | if os.path.exists(path) and status: 210 | if name in auto_checkout: 211 | config_develop.setdefault(name, 'auto') 212 | else: 213 | if status == 'auto': 214 | if name in config_develop: 215 | del config_develop[name] 216 | continue 217 | config_develop.setdefault(name, True) 218 | develeggs[name] = path 219 | develeggs_order.append(name) 220 | versions[safe_name(name)] = '' 221 | develop = [] 222 | for path in [develeggs[k] for k in develeggs_order]: 223 | if path.startswith(self.buildout_dir): 224 | develop.append(path[len(self.buildout_dir) + 1:]) 225 | else: 226 | develop.append(path) 227 | return develop, develeggs, versions 228 | 229 | def get_always_accept_server_certificate(self): 230 | always_accept_server_certificate = self.buildout['buildout'].get('always-accept-server-certificate', False) 231 | if isinstance(always_accept_server_certificate, bool): 232 | pass 233 | elif always_accept_server_certificate.lower() in ('true', 'yes', 'on'): 234 | always_accept_server_certificate = True 235 | elif always_accept_server_certificate.lower() in ('false', 'no', 'off'): 236 | always_accept_server_certificate = False 237 | else: 238 | logger.error("Unknown value '%s' for always-accept-server-certificate option." 
% always_accept_server_certificate) 239 | sys.exit(1) 240 | return always_accept_server_certificate 241 | 242 | def add_fake_part(self): 243 | if FAKE_PART_ID in self.buildout._raw: 244 | logger.error("The buildout already has a '%s' section, this shouldn't happen" % FAKE_PART_ID) 245 | sys.exit(1) 246 | self.buildout._raw[FAKE_PART_ID] = dict( 247 | recipe='zc.recipe.egg', 248 | eggs='mr.developer', 249 | ) 250 | # insert the fake part 251 | parts = self.buildout['buildout']['parts'].split() 252 | parts.insert(0, FAKE_PART_ID) 253 | self.buildout['buildout']['parts'] = " ".join(parts) 254 | 255 | def __call__(self): 256 | config = self.get_config() 257 | 258 | # store arguments when running from buildout 259 | if os.path.split(self.executable)[1] in ('buildout', 'buildout-script.py'): 260 | config.buildout_args = list(sys.argv) 261 | 262 | auto_checkout = self.get_auto_checkout() 263 | 264 | root_logger = logging.getLogger() 265 | workingcopies = self.get_workingcopies() 266 | always_checkout = self.get_always_checkout() 267 | update_git_submodules = self.get_update_git_submodules() 268 | always_accept_server_certificate = self.get_always_accept_server_certificate() 269 | (develop, develeggs, versions) = self.get_develop_info() 270 | 271 | packages = set(auto_checkout) 272 | sources = self.get_sources() 273 | for pkg in develeggs: 274 | if pkg in sources: 275 | if always_checkout or sources[pkg].get('update'): 276 | packages.add(pkg) 277 | 278 | offline = self.buildout['buildout'].get('offline', '').lower() == 'true' 279 | verbose = root_logger.level <= 10 or self.get_mrdev_verbose() 280 | workingcopies.checkout(sorted(packages), 281 | verbose=verbose, 282 | update=always_checkout, 283 | submodules=update_git_submodules, 284 | always_accept_server_certificate=always_accept_server_certificate, 285 | offline=offline) 286 | 287 | # get updated info after checkout 288 | (develop, develeggs, versions) = self.get_develop_info() 289 | 290 | if versions: 291 | import zc.buildout.easy_install 292 | zc.buildout.easy_install.default_versions(dict(versions)) 293 | 294 | self.buildout['buildout']['develop'] = "\n".join(develop) 295 | self.buildout['buildout']['sources-dir'] = self.get_sources_dir() 296 | 297 | self.add_fake_part() 298 | 299 | config.save() 300 | 301 | 302 | def extension(buildout=None): 303 | return Extension(buildout)() 304 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_extension.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | from mock import patch 3 | from mr.developer.extension import Extension 4 | from mr.developer.tests.utils import MockConfig 5 | from zc.buildout.buildout import MissingSection 6 | import os 7 | import pytest 8 | 9 | 10 | class MockBuildout(object): 11 | def __init__(self, config=None): 12 | if config is None: 13 | config = dict() 14 | self._raw = deepcopy(config) 15 | 16 | def __contains__(self, key): 17 | return key in self._raw 18 | 19 | def __delitem__(self, key): 20 | del self._raw[key] 21 | 22 | def __getitem__(self, key): 23 | try: 24 | return self._raw[key] 25 | except KeyError: 26 | raise MissingSection(key) 27 | 28 | def get(self, key, default=None): 29 | return self._raw.get(key, default) 30 | 31 | def __repr__(self): 32 | return repr(self._raw) 33 | 34 | 35 | class MockWorkingCopies(object): 36 | def __init__(self, sources): 37 | self.sources = sources 38 | self._events = [] 39 | 40 | def checkout(self, packages, 
**kwargs): 41 | self._events.append(('checkout', packages, kwargs)) 42 | return False 43 | 44 | 45 | class TestExtensionClass: 46 | @pytest.fixture 47 | def buildout(self): 48 | return MockBuildout(dict( 49 | buildout=dict( 50 | directory='/buildout', 51 | parts=''), 52 | sources={})) 53 | 54 | @pytest.fixture 55 | def extension(self, buildout): 56 | from mr.developer.extension import memoize 57 | 58 | class MockExtension(Extension): 59 | @memoize 60 | def get_config(self): 61 | return MockConfig() 62 | 63 | @memoize 64 | def get_workingcopies(self): 65 | return MockWorkingCopies(self.get_sources()) 66 | 67 | return MockExtension(buildout) 68 | 69 | def testPartAdded(self, buildout, extension): 70 | assert '_mr.developer' not in buildout['buildout']['parts'] 71 | extension() 72 | assert '_mr.developer' in buildout 73 | assert '_mr.developer' in buildout['buildout']['parts'] 74 | 75 | def testPartExists(self, buildout, extension): 76 | buildout._raw['_mr.developer'] = {} 77 | pytest.raises(SystemExit, extension) 78 | 79 | def testArgsIgnoredIfNotBuildout(self, extension): 80 | extension() 81 | assert extension.get_config().buildout_args == [] 82 | 83 | def testBuildoutArgsSaved(self, extension): 84 | extension.executable = 'buildout' 85 | extension() 86 | assert hasattr(extension.get_config(), 'buildout_args') 87 | 88 | def testAutoCheckout(self, buildout, extension): 89 | buildout['sources'].update({ 90 | 'pkg.foo': 'svn dummy://pkg.foo', 91 | 'pkg.bar': 'svn dummy://pkg.bar', 92 | }) 93 | buildout['buildout']['auto-checkout'] = 'pkg.foo' 94 | extension() 95 | wcs = extension.get_workingcopies() 96 | assert len(wcs._events) == 1 97 | assert wcs._events[0][0] == 'checkout' 98 | assert wcs._events[0][1] == ['pkg.foo'] 99 | 100 | def testAutoCheckoutMissingSource(self, buildout, extension): 101 | buildout['buildout']['auto-checkout'] = 'pkg.foo' 102 | pytest.raises(SystemExit, extension.get_auto_checkout) 103 | 104 | def testAutoCheckoutMissingSources(self, buildout, extension): 105 | buildout['buildout']['auto-checkout'] = 'pkg.foo pkg.bar' 106 | pytest.raises(SystemExit, extension.get_auto_checkout) 107 | 108 | def testAutoCheckoutWildcard(self, buildout, extension): 109 | buildout['sources'].update({ 110 | 'pkg.foo': 'svn dummy://pkg.foo', 111 | 'pkg.bar': 'svn dummy://pkg.bar', 112 | }) 113 | buildout['buildout']['auto-checkout'] = '*' 114 | extension() 115 | wcs = extension.get_workingcopies() 116 | len(wcs._events) == 1 117 | wcs._events[0][0] == 'checkout' 118 | wcs._events[0][1] == ['pkg.bar', 'pkg.foo'] 119 | 120 | def testRewriteSources(self, buildout, extension): 121 | from mr.developer.common import LegacyRewrite 122 | buildout['sources'].update({ 123 | 'pkg.foo': 'svn dummy://pkg.foo', 124 | 'pkg.bar': 'svn baz://pkg.bar', 125 | }) 126 | extension.get_config().rewrites.append( 127 | LegacyRewrite('dummy://', 'ham://')) 128 | sources = extension.get_sources() 129 | assert sources['pkg.foo']['url'] == 'ham://pkg.foo' 130 | assert sources['pkg.bar']['url'] == 'baz://pkg.bar' 131 | 132 | def _testEmptySourceDefinition(self, buildout, extension): 133 | # TODO handle this case 134 | buildout['sources'].update({ 135 | 'pkg.foo': '', 136 | }) 137 | extension.get_sources() 138 | 139 | def _testTooShortSourceDefinition(self, buildout, extension): 140 | # TODO handle this case 141 | buildout['sources'].update({ 142 | 'pkg.foo': 'svn', 143 | }) 144 | extension.get_sources() 145 | 146 | def testRepositoryKindChecking(self, buildout, extension): 147 | buildout['sources'].update({ 148 | 
'pkg.bar': 'dummy://foo/trunk svn', 149 | }) 150 | pytest.raises(SystemExit, extension.get_sources) 151 | buildout['sources'].update({ 152 | 'pkg.bar': 'foo dummy://foo/trunk', 153 | }) 154 | pytest.raises(SystemExit, extension.get_sources) 155 | 156 | def testOldSourcePathParsing(self, buildout, extension): 157 | buildout['sources'].update({ 158 | 'pkg.bar': 'svn dummy://foo/trunk', 159 | 'pkg.ham': 'git dummy://foo/trunk ham', 160 | 'pkg.baz': 'git dummy://foo/trunk other/baz', 161 | 'pkg.foo': 'git dummy://foo/trunk /foo', 162 | }) 163 | sources = extension.get_sources() 164 | assert sources['pkg.bar']['path'] == os.path.join(os.sep, 'buildout', 'src', 'pkg.bar') 165 | assert sources['pkg.ham']['path'] == os.path.join(os.sep, 'buildout', 'ham', 'pkg.ham') 166 | assert sources['pkg.baz']['path'] == os.path.join(os.sep, 'buildout', 'other', 'baz', 'pkg.baz') 167 | assert sources['pkg.foo']['path'] == os.path.join(os.sep, 'foo', 'pkg.foo') 168 | 169 | def testSourcePathParsing(self, buildout, extension): 170 | buildout['sources'].update({ 171 | 'pkg.bar': 'svn dummy://foo/trunk', 172 | 'pkg.ham': 'git dummy://foo/trunk path=ham', 173 | 'pkg.baz': 'git dummy://foo/trunk path=other/baz', 174 | 'pkg.foo': 'git dummy://foo/trunk path=/foo', 175 | }) 176 | sources = extension.get_sources() 177 | assert sources['pkg.bar']['path'] == os.path.join(os.sep, 'buildout', 'src', 'pkg.bar') 178 | assert sources['pkg.ham']['path'] == os.path.join(os.sep, 'buildout', 'ham', 'pkg.ham') 179 | assert sources['pkg.baz']['path'] == os.path.join(os.sep, 'buildout', 'other', 'baz', 'pkg.baz') 180 | assert sources['pkg.foo']['path'] == os.path.join(os.sep, 'foo', 'pkg.foo') 181 | 182 | def testOptionParsing(self, buildout, extension): 183 | buildout['sources'].update({ 184 | 'pkg.bar': 'svn dummy://foo/trunk revision=456', 185 | 'pkg.ham': 'git dummy://foo/trunk ham rev=456ad138', 186 | 'pkg.foo': 'git dummy://foo/trunk rev=>=456ad138 branch=blubber', 187 | }) 188 | sources = extension.get_sources() 189 | 190 | assert sorted(sources['pkg.bar'].keys()) == ['kind', 'name', 'path', 'revision', 'url'] 191 | assert sources['pkg.bar']['revision'] == '456' 192 | 193 | assert sorted(sources['pkg.ham'].keys()) == ['kind', 'name', 'path', 'rev', 'url'] 194 | assert sources['pkg.ham']['path'] == os.path.join(os.sep, 'buildout', 'ham', 'pkg.ham') 195 | assert sources['pkg.ham']['rev'] == '456ad138' 196 | 197 | assert sorted(sources['pkg.foo'].keys()) == ['branch', 'kind', 'name', 'path', 'rev', 'url'] 198 | assert sources['pkg.foo']['branch'] == 'blubber' 199 | assert sources['pkg.foo']['rev'] == '>=456ad138' 200 | 201 | def testOptionParsingBeforeURL(self, buildout, extension): 202 | buildout['sources'].update({ 203 | 'pkg.bar': 'svn revision=456 dummy://foo/trunk', 204 | 'pkg.ham': 'git rev=456ad138 dummy://foo/trunk ham', 205 | 'pkg.foo': 'git rev=>=456ad138 branch=blubber dummy://foo/trunk', 206 | }) 207 | sources = extension.get_sources() 208 | 209 | assert sorted(sources['pkg.bar'].keys()) == ['kind', 'name', 'path', 'revision', 'url'] 210 | assert sources['pkg.bar']['revision'] == '456' 211 | 212 | assert sorted(sources['pkg.ham'].keys()) == ['kind', 'name', 'path', 'rev', 'url'] 213 | assert sources['pkg.ham']['path'] == os.path.join(os.sep, 'buildout', 'ham', 'pkg.ham') 214 | assert sources['pkg.ham']['rev'] == '456ad138' 215 | 216 | assert sorted(sources['pkg.foo'].keys()) == ['branch', 'kind', 'name', 'path', 'rev', 'url'] 217 | assert sources['pkg.foo']['branch'] == 'blubber' 218 | assert 
sources['pkg.foo']['rev'] == '>=456ad138' 219 | 220 | def testDuplicateOptionParsing(self, buildout, extension): 221 | buildout['sources'].update({ 222 | 'pkg.foo': 'git dummy://foo/trunk rev=456ad138 rev=blubber', 223 | }) 224 | pytest.raises(ValueError, extension.get_sources) 225 | 226 | buildout['sources'].update({ 227 | 'pkg.foo': 'git dummy://foo/trunk kind=svn', 228 | }) 229 | pytest.raises(ValueError, extension.get_sources) 230 | 231 | def testInvalidOptionParsing(self, buildout, extension): 232 | buildout['sources'].update({ 233 | 'pkg.foo': 'git dummy://foo/trunk rev=456ad138 =foo', 234 | }) 235 | pytest.raises(ValueError, extension.get_sources) 236 | 237 | def testDevelopHonored(self, buildout, extension): 238 | buildout['buildout']['develop'] = '/normal/develop ' \ 239 | '/develop/with/slash/' 240 | 241 | (develop, develeggs, versions) = extension.get_develop_info() 242 | assert '/normal/develop' in develop 243 | assert '/develop/with/slash/' in develop 244 | assert 'slash' in develeggs 245 | assert 'develop' in develeggs 246 | assert develeggs['slash'] == '/develop/with/slash/' 247 | assert develeggs['develop'] == '/normal/develop' 248 | 249 | def testDevelopSafeName(self, buildout, extension): 250 | '''We have two source packages: 251 | - pkg.bar_foo 252 | - pkg.foo_bar 253 | both of them have a pinned version. 254 | 255 | If we auto-checkout pkg.foo_bar it gets unpinned! 256 | ''' 257 | buildout['sources'].update({ 258 | 'pkg.bar_foo': 'svn dummy://pkg.bar_foo', 259 | 'pkg.foo_bar': 'svn dummy://pkg.foo_bar', 260 | }) 261 | buildout['buildout']['auto-checkout'] = 'pkg.foo_bar' 262 | buildout._raw['buildout']['versions'] = 'versions' 263 | buildout._raw['versions'] = { 264 | 'pkg.foo-bar': '1.0', 265 | 'pkg.bar-foo': '1.0', 266 | } 267 | _exists = patch('os.path.exists') 268 | exists = _exists.__enter__() 269 | try: 270 | exists().return_value = True 271 | 272 | (develop, develeggs, versions) = extension.get_develop_info() 273 | finally: 274 | _exists.__exit__(None, None, None) 275 | assert buildout['versions'] == { 276 | 'pkg.foo-bar': '', 277 | 'pkg.bar-foo': '1.0'} 278 | 279 | def testDevelopOrder(self, buildout, extension): 280 | buildout['buildout']['develop'] = '/normal/develop ' \ 281 | '/develop/with/slash/' 282 | 283 | (develop, develeggs, versions) = extension.get_develop_info() 284 | assert develop == ['/normal/develop', '/develop/with/slash/'] 285 | 286 | def testDevelopSourcesMix(self, buildout, extension): 287 | buildout['sources'].update({ 288 | 'pkg.bar': 'svn dummy://foo/trunk'}) 289 | buildout['buildout']['auto-checkout'] = 'pkg.bar' 290 | buildout['buildout']['develop'] = '/normal/develop ' \ 291 | '/develop/with/slash/' 292 | 293 | _exists = patch('os.path.exists') 294 | exists = _exists.__enter__() 295 | try: 296 | exists().return_value = True 297 | (develop, develeggs, versions) = extension.get_develop_info() 298 | finally: 299 | _exists.__exit__(None, None, None) 300 | assert develop == ['/normal/develop', '/develop/with/slash/', 'src/pkg.bar'] 301 | 302 | def testMissingSourceSection(self, buildout, extension): 303 | del buildout['sources'] 304 | assert extension.get_sources() == {} 305 | 306 | 307 | class TestExtension: 308 | def testConfigCreated(self, tempdir): 309 | from mr.developer.extension import extension 310 | buildout = MockBuildout(dict( 311 | buildout=dict( 312 | directory=tempdir, 313 | parts=''), 314 | sources={})) 315 | extension(buildout) 316 | assert '.mr.developer.cfg' in os.listdir(tempdir) 317 | 318 | 319 | class TestSourcesDir: 320 
| def test_sources_dir_option_set_if_missing(self, tempdir): 321 | buildout = MockBuildout(dict( 322 | buildout={ 323 | 'directory': tempdir, 324 | 'parts': ''}, 325 | sources={}, 326 | )) 327 | ext = Extension(buildout) 328 | assert 'sources-dir' not in buildout['buildout'] 329 | ext() 330 | assert buildout['buildout']['sources-dir'] == os.path.join( 331 | tempdir, 'src') 332 | 333 | def test_sources_dir_created(self, tempdir): 334 | buildout = MockBuildout(dict( 335 | buildout={ 336 | 'directory': tempdir, 337 | 'parts': '', 338 | 'sources-dir': 'develop'}, 339 | sources={}, 340 | )) 341 | assert 'develop' not in os.listdir(tempdir) 342 | ext = Extension(buildout) 343 | ext() 344 | assert 'develop' in os.listdir(tempdir) 345 | assert ext.get_sources_dir() == tempdir['develop'] 346 | -------------------------------------------------------------------------------- /src/mr/developer/git.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from mr.developer import common 4 | import os 5 | import subprocess 6 | import re 7 | import sys 8 | 9 | 10 | logger = common.logger 11 | 12 | 13 | class GitError(common.WCError): 14 | pass 15 | 16 | 17 | class GitWorkingCopy(common.BaseWorkingCopy): 18 | """The git working copy. 19 | 20 | Now supports git 1.5 and 1.6+ in a single codebase. 21 | """ 22 | 23 | # the file protocol setting is only for testing, as it circumvents security 24 | # measures of default git settings 25 | _always_allow_file_protocol = False 26 | 27 | # TODO: make this configurable? It might not make sense however, as we 28 | # should make master and a lot of other conventional stuff configurable 29 | _upstream_name = "origin" 30 | 31 | def __init__(self, source): 32 | self.git_executable = common.which('git') 33 | if 'rev' in source and 'revision' in source: 34 | raise ValueError("The source definition of '%s' contains " 35 | "duplicate revision options." 
% source['name']) 36 | # 'rev' is canonical 37 | if 'revision' in source: 38 | source['rev'] = source['revision'] 39 | del source['revision'] 40 | if 'branch' in source and 'rev' in source: 41 | logger.error("Cannot specify both branch (%s) and rev/revision " 42 | "(%s) in source for %s", 43 | source['branch'], source['rev'], source['name']) 44 | sys.exit(1) 45 | super(GitWorkingCopy, self).__init__(source) 46 | 47 | @common.memoize 48 | def git_version(self): 49 | cmd = self.run_git(['--version']) 50 | stdout, stderr = cmd.communicate() 51 | if cmd.returncode != 0: 52 | logger.error("Could not determine git version") 53 | logger.error("'git --version' output was:\n%s\n%s" % (stdout, stderr)) 54 | sys.exit(1) 55 | 56 | m = re.search(r"git version (\d+)\.(\d+)(\.\d+)?(\.\d+)?", stdout) 57 | if m is None: 58 | logger.error("Unable to parse git version output") 59 | logger.error("'git --version' output was:\n%s\n%s" % (stdout, stderr)) 60 | sys.exit(1) 61 | version = m.groups() 62 | 63 | if version[3] is not None: 64 | version = ( 65 | int(version[0]), 66 | int(version[1]), 67 | int(version[2][1:]), 68 | int(version[3][1:]) 69 | ) 70 | elif version[2] is not None: 71 | version = ( 72 | int(version[0]), 73 | int(version[1]), 74 | int(version[2][1:]) 75 | ) 76 | else: 77 | version = (int(version[0]), int(version[1])) 78 | if version < (1, 5): 79 | logger.error( 80 | "Git version %s is unsupported, please upgrade", 81 | ".".join([str(v) for v in version])) 82 | sys.exit(1) 83 | return version 84 | 85 | @property 86 | def _remote_branch_prefix(self): 87 | version = self.git_version() 88 | if version < (1, 6, 3): 89 | return self._upstream_name 90 | else: 91 | return 'remotes/%s' % self._upstream_name 92 | 93 | def run_git(self, commands, **kwargs): 94 | commands.insert(0, self.git_executable) 95 | kwargs['stdout'] = subprocess.PIPE 96 | kwargs['stderr'] = subprocess.PIPE 97 | # This should ease things up when multiple processes are trying to send 98 | # back to the main one large chunks of output 99 | kwargs['bufsize'] = -1 100 | kwargs['universal_newlines'] = True 101 | return subprocess.Popen(commands, **kwargs) 102 | 103 | def git_merge_rbranch(self, stdout_in, stderr_in, accept_missing=False): 104 | path = self.source['path'] 105 | branch = self.source.get('branch', 'master') 106 | 107 | cmd = self.run_git(["branch", "-a"], cwd=path) 108 | stdout, stderr = cmd.communicate() 109 | if cmd.returncode != 0: 110 | raise GitError("'git branch -a' failed.\n%s" % stderr) 111 | stdout_in += stdout 112 | stderr_in += stderr 113 | if not re.search(r"^(\*| ) %s$" % re.escape(branch), stdout, re.M): 114 | # The branch is not local. We should not have reached 115 | # this, unless no branch was specified and we guess wrong 116 | # that it should be master. 
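            # Note: the pattern above only matches *local* branch lines in the
            # 'git branch -a' output ("* <branch>" for the current branch,
            # "  <branch>" for other local ones); remote-tracking entries such
            # as "remotes/origin/<branch>" do not match.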
117 | if accept_missing: 118 | logger.info("No such branch %r", branch) 119 | return (stdout_in, stderr_in) 120 | else: 121 | logger.error("No such branch %r", branch) 122 | sys.exit(1) 123 | 124 | rbp = self._remote_branch_prefix 125 | cmd = self.run_git(["merge", "%s/%s" % (rbp, branch)], cwd=path) 126 | stdout, stderr = cmd.communicate() 127 | if cmd.returncode != 0: 128 | raise GitError("git merge of remote branch 'origin/%s' failed.\n%s" % (branch, stderr)) 129 | return (stdout_in + stdout, 130 | stderr_in + stderr) 131 | 132 | def git_checkout(self, **kwargs): 133 | name = self.source['name'] 134 | path = self.source['path'] 135 | url = self.source['url'] 136 | if os.path.exists(path): 137 | self.output((logger.info, "Skipped cloning of existing package '%s'." % name)) 138 | return 139 | msg = "Cloned '%s' with git" % name 140 | if "branch" in self.source: 141 | msg += " using branch '%s'" % self.source['branch'] 142 | msg += " from '%s'." % url 143 | self.output((logger.info, msg)) 144 | args = ["clone", "--quiet"] 145 | if 'depth' in self.source: 146 | args.extend(["--depth", self.source["depth"]]) 147 | if "branch" in self.source: 148 | args.extend(["-b", self.source["branch"]]) 149 | args.extend([url, path]) 150 | cmd = self.run_git(args) 151 | stdout, stderr = cmd.communicate() 152 | if cmd.returncode != 0: 153 | raise GitError("git cloning of '%s' failed.\n%s" % (name, stderr)) 154 | if 'rev' in self.source: 155 | stdout, stderr = self.git_switch_branch(stdout, stderr) 156 | if 'pushurl' in self.source: 157 | stdout, stderr = self.git_set_pushurl(stdout, stderr) 158 | 159 | update_git_submodules = self.source.get('submodules', kwargs['submodules']) 160 | if update_git_submodules in ['always', 'checkout']: 161 | stdout, stderr, initialized = self.git_init_submodules(stdout, stderr) 162 | # Update only new submodules that we just registered. this is for safety reasons 163 | # as git submodule update on modified submodules may cause code loss 164 | for submodule in initialized: 165 | stdout, stderr = self.git_update_submodules(stdout, stderr, submodule=submodule) 166 | self.output((logger.info, "Initialized '%s' submodule at '%s' with git." % (name, submodule))) 167 | 168 | if kwargs.get('verbose', False): 169 | return stdout 170 | 171 | def git_switch_branch(self, stdout_in, stderr_in, accept_missing=False): 172 | """Switch branches. 173 | 174 | If accept_missing is True, we do not switch the branch if it 175 | is not there. Useful for switching back to master. 176 | """ 177 | path = self.source['path'] 178 | branch = self.source.get('branch', 'master') 179 | rbp = self._remote_branch_prefix 180 | cmd = self.run_git(["branch", "-a"], cwd=path) 181 | stdout, stderr = cmd.communicate() 182 | if cmd.returncode != 0: 183 | raise GitError("'git branch -a' failed.\n%s" % stderr) 184 | stdout_in += stdout 185 | stderr_in += stderr 186 | if 'rev' in self.source: 187 | # A tag or revision was specified instead of a branch 188 | argv = ["checkout", self.source['rev']] 189 | self.output((logger.info, "Switching to rev '%s'." % self.source['rev'])) 190 | elif re.search(r"^(\*| ) %s$" % re.escape(branch), stdout, re.M): 191 | # the branch is local, normal checkout will work 192 | argv = ["checkout", branch] 193 | self.output((logger.info, "Switching to branch '%s'." 
% branch)) 194 | elif re.search( 195 | "^ " + re.escape(rbp) + r"\/" + re.escape(branch) + "$", 196 | stdout, re.M): 197 | # the branch is not local, normal checkout won't work here 198 | rbranch = "%s/%s" % (rbp, branch) 199 | argv = ["checkout", "-b", branch, rbranch] 200 | self.output((logger.info, "Switching to remote branch '%s'." % rbranch)) 201 | elif accept_missing: 202 | self.output((logger.info, "No such branch %r", branch)) 203 | return (stdout_in + stdout, 204 | stderr_in + stderr) 205 | else: 206 | self.output((logger.error, "No such branch %r", branch)) 207 | sys.exit(1) 208 | # runs the checkout with predetermined arguments 209 | cmd = self.run_git(argv, cwd=path) 210 | stdout, stderr = cmd.communicate() 211 | if cmd.returncode != 0: 212 | raise GitError("git checkout of branch '%s' failed.\n%s" % (branch, stderr)) 213 | return (stdout_in + stdout, 214 | stderr_in + stderr) 215 | 216 | def git_update(self, **kwargs): 217 | name = self.source['name'] 218 | path = self.source['path'] 219 | self.output((logger.info, "Updated '%s' with git." % name)) 220 | # First we fetch. This should always be possible. 221 | argv = ["fetch"] 222 | cmd = self.run_git(argv, cwd=path) 223 | stdout, stderr = cmd.communicate() 224 | if cmd.returncode != 0: 225 | raise GitError("git fetch of '%s' failed.\n%s" % (name, stderr)) 226 | if 'rev' in self.source: 227 | stdout, stderr = self.git_switch_branch(stdout, stderr) 228 | elif 'branch' in self.source: 229 | stdout, stderr = self.git_switch_branch(stdout, stderr) 230 | stdout, stderr = self.git_merge_rbranch(stdout, stderr) 231 | else: 232 | # We may have specified a branch previously but not 233 | # anymore. In that case, we want to revert to master. 234 | stdout, stderr = self.git_switch_branch(stdout, stderr, accept_missing=True) 235 | stdout, stderr = self.git_merge_rbranch(stdout, stderr, accept_missing=True) 236 | 237 | update_git_submodules = self.source.get('submodules', kwargs['submodules']) 238 | if update_git_submodules in ['always']: 239 | stdout, stderr, initialized = self.git_init_submodules(stdout, stderr) 240 | # Update only new submodules that we just registered. this is for safety reasons 241 | # as git submodule update on modified subomdules may cause code loss 242 | for submodule in initialized: 243 | stdout, stderr = self.git_update_submodules(stdout, stderr, submodule=submodule) 244 | self.output((logger.info, "Initialized '%s' submodule at '%s' with git." % (name, submodule))) 245 | 246 | if kwargs.get('verbose', False): 247 | return stdout 248 | 249 | def checkout(self, **kwargs): 250 | name = self.source['name'] 251 | path = self.source['path'] 252 | update = self.should_update(**kwargs) 253 | if os.path.exists(path): 254 | if update: 255 | return self.update(**kwargs) 256 | elif self.matches(): 257 | self.output((logger.info, "Skipped checkout of existing package '%s'." % name)) 258 | else: 259 | self.output((logger.warning, "Checkout URL for existing package '%s' differs. Expected '%s'." 
% (name, self.source['url']))) 260 | else: 261 | return self.git_checkout(**kwargs) 262 | 263 | def status(self, **kwargs): 264 | path = self.source['path'] 265 | cmd = self.run_git(["status", "-s", "-b"], cwd=path) 266 | stdout, stderr = cmd.communicate() 267 | lines = stdout.strip().split('\n') 268 | if len(lines) == 1: 269 | if 'ahead' in lines[0]: 270 | status = 'ahead' 271 | else: 272 | status = 'clean' 273 | else: 274 | status = 'dirty' 275 | if kwargs.get('verbose', False): 276 | return status, stdout 277 | else: 278 | return status 279 | 280 | def matches(self): 281 | name = self.source['name'] 282 | path = self.source['path'] 283 | # This is the old matching code: it does not work on 1.5 due to the 284 | # lack of the -v switch 285 | cmd = self.run_git(["remote", "show", "-n", self._upstream_name], 286 | cwd=path) 287 | stdout, stderr = cmd.communicate() 288 | if cmd.returncode != 0: 289 | raise GitError("git remote of '%s' failed.\n%s" % (name, stderr)) 290 | return (self.source['url'] in stdout.split()) 291 | 292 | def update(self, **kwargs): 293 | name = self.source['name'] 294 | if not self.matches(): 295 | self.output((logger.warning, "Can't update package '%s' because its URL doesn't match." % name)) 296 | if self.status() != 'clean' and not kwargs.get('force', False): 297 | raise GitError("Can't update package '%s' because it's dirty." % name) 298 | return self.git_update(**kwargs) 299 | 300 | def git_set_pushurl(self, stdout_in, stderr_in): 301 | cmd = self.run_git( 302 | [ 303 | "config", 304 | "remote.%s.pushurl" % self._upstream_name, 305 | self.source['pushurl']], 306 | cwd=self.source['path']) 307 | stdout, stderr = cmd.communicate() 308 | 309 | if cmd.returncode != 0: 310 | raise GitError("git config remote.%s.pushurl %s \nfailed.\n" % (self._upstream_name, self.source['pushurl'])) 311 | return (stdout_in + stdout, stderr_in + stderr) 312 | 313 | def git_init_submodules(self, stdout_in, stderr_in): 314 | cmd = self.run_git( 315 | [ 316 | 'submodule', 317 | 'init'], 318 | cwd=self.source['path']) 319 | stdout, stderr = cmd.communicate() 320 | if cmd.returncode != 0: 321 | raise GitError("git submodule init failed.\n") 322 | output = stdout 323 | if not output: 324 | output = stderr 325 | initialized_submodules = re.findall( 326 | r'\s+[\'"](.*?)[\'"]\s+\(.+\)', 327 | output) 328 | return (stdout_in + stdout, stderr_in + stderr, initialized_submodules) 329 | 330 | def git_update_submodules(self, stdout_in, stderr_in, submodule='all'): 331 | params = ['submodule', 332 | 'update'] 333 | if self._always_allow_file_protocol: 334 | params[0:0] = ["-c", "protocol.file.allow=always"] 335 | if submodule != 'all': 336 | params.append(submodule) 337 | cmd = self.run_git( 338 | params, 339 | cwd=self.source['path']) 340 | stdout, stderr = cmd.communicate() 341 | if cmd.returncode != 0: 342 | raise GitError("git submodule update failed.\n") 343 | return (stdout_in + stdout, stderr_in + stderr) 344 | -------------------------------------------------------------------------------- /src/mr/developer/tests/test_git_submodules.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | from mr.developer.extension import Source 3 | from mr.developer.tests.utils import GitRepo 4 | import os 5 | 6 | 7 | class TestGitSubmodules: 8 | def testCheckoutWithSubmodule(self, develop, mkgitrepo, src): 9 | """ 10 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it 11 | """ 12 | from mr.developer.commands import 
CmdCheckout 13 | submodule_name = 'submodule_a' 14 | submodule_a = mkgitrepo(submodule_name) 15 | submodule_a.add_file('foo') 16 | egg = mkgitrepo('egg') 17 | egg.add_file('bar') 18 | egg.add_submodule(submodule_a, submodule_name) 19 | 20 | develop.sources = { 21 | 'egg': Source( 22 | kind='git', 23 | name='egg', 24 | url=egg.url, 25 | path=src['egg'])} 26 | _log = patch('mr.developer.git.logger') 27 | log = _log.__enter__() 28 | try: 29 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 30 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 31 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo')) 32 | assert log.method_calls == [ 33 | ('info', ("Cloned 'egg' with git from '%s'." % egg.url,), {}), 34 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {})] 35 | finally: 36 | _log.__exit__(None, None, None) 37 | 38 | def testCheckoutWithTwoSubmodules(self, develop, mkgitrepo, src): 39 | """ 40 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' 41 | and a submodule 'submodule_b' in it. 42 | """ 43 | from mr.developer.commands import CmdCheckout 44 | submodule_name = 'submodule_a' 45 | submodule = mkgitrepo(submodule_name) 46 | submodule_b_name = 'submodule_b' 47 | submodule_b = mkgitrepo(submodule_b_name) 48 | 49 | submodule.add_file('foo') 50 | submodule_b.add_file('foo_b') 51 | egg = mkgitrepo('egg') 52 | egg.add_file('bar') 53 | egg.add_submodule(submodule, submodule_name) 54 | egg.add_submodule(submodule_b, submodule_b_name) 55 | 56 | develop.sources = { 57 | 'egg': Source( 58 | kind='git', 59 | name='egg', 60 | url=egg.url, 61 | path=src['egg'])} 62 | _log = patch('mr.developer.git.logger') 63 | log = _log.__enter__() 64 | try: 65 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 66 | assert set(os.listdir(src['egg'])) == set(('submodule_a', 'submodule_b', '.git', 'bar', '.gitmodules')) 67 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo')) 68 | assert set(os.listdir(src['egg/%s' % submodule_b_name])) == set(('.git', 'foo_b')) 69 | assert log.method_calls == [ 70 | ('info', ("Cloned 'egg' with git from '%s'." % egg.url,), {}), 71 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {}), 72 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_b_name,), {})] 73 | finally: 74 | _log.__exit__(None, None, None) 75 | 76 | def testUpdateWithSubmodule(self, develop, mkgitrepo, src): 77 | """ 78 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it. 79 | Add a new 'submodule_b' to 'egg' and check it succesfully initializes. 80 | """ 81 | from mr.developer.commands import CmdCheckout, CmdUpdate 82 | submodule_name = 'submodule_a' 83 | submodule = mkgitrepo(submodule_name) 84 | submodule.add_file('foo') 85 | egg = mkgitrepo('egg') 86 | egg.add_file('bar') 87 | egg.add_submodule(submodule, submodule_name) 88 | 89 | develop.sources = { 90 | 'egg': Source( 91 | kind='git', 92 | name='egg', 93 | url=egg.url, 94 | path=src['egg'])} 95 | _log = patch('mr.developer.git.logger') 96 | log = _log.__enter__() 97 | try: 98 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 99 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 100 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo')) 101 | assert log.method_calls == [ 102 | ('info', ("Cloned 'egg' with git from '%s'." 
% egg.url,), {}), 103 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {})] 104 | finally: 105 | _log.__exit__(None, None, None) 106 | 107 | submodule_b_name = 'submodule_b' 108 | submodule_b = mkgitrepo(submodule_b_name) 109 | submodule_b.add_file('foo_b') 110 | egg.add_submodule(submodule_b, submodule_b_name) 111 | 112 | log = _log.__enter__() 113 | try: 114 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 115 | assert set(os.listdir(src['egg'])) == set(('submodule_a', 'submodule_b', '.git', 'bar', '.gitmodules')) 116 | assert set(os.listdir(src['egg/%s' % submodule_b_name])) == set(('.git', 'foo_b')) 117 | assert log.method_calls == [ 118 | ('info', ("Updated 'egg' with git.",), {}), 119 | ('info', ("Switching to branch 'master'.",), {}), 120 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_b_name,), {})] 121 | finally: 122 | _log.__exit__(None, None, None) 123 | 124 | def testCheckoutWithSubmodulesOptionNever(self, develop, mkgitrepo, src): 125 | """ 126 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it 127 | without initializing the submodule, restricted by global 'never' 128 | """ 129 | 130 | from mr.developer.commands import CmdCheckout 131 | submodule_name = 'submodule_a' 132 | submodule_a = mkgitrepo(submodule_name) 133 | submodule_a.add_file('foo') 134 | egg = mkgitrepo('egg') 135 | egg.add_file('bar') 136 | egg.add_submodule(submodule_a, submodule_name) 137 | 138 | develop.update_git_submodules = 'never' 139 | develop.sources = { 140 | 'egg': Source( 141 | kind='git', 142 | name='egg', 143 | url=egg.url, 144 | path=src['egg'])} 145 | _log = patch('mr.developer.git.logger') 146 | log = _log.__enter__() 147 | try: 148 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 149 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 150 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set() 151 | assert log.method_calls == [ 152 | ('info', ("Cloned 'egg' with git from '%s'." 
% egg.url,), {})] 153 | finally: 154 | _log.__exit__(None, None, None) 155 | 156 | def testCheckoutWithSubmodulesOptionNeverSourceAlways(self, develop, mkgitrepo, src): 157 | """ 158 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it 159 | and a module 'egg2' with the same submodule, initializing only the submodule 160 | on egg that has the 'always' option 161 | """ 162 | 163 | from mr.developer.commands import CmdCheckout 164 | submodule_name = 'submodule_a' 165 | submodule_a = mkgitrepo(submodule_name) 166 | submodule_a.add_file('foo') 167 | egg = mkgitrepo('egg') 168 | egg.add_file('bar') 169 | egg.add_submodule(submodule_a, submodule_name) 170 | 171 | egg2 = mkgitrepo('egg2') 172 | egg2.add_file('bar') 173 | egg2.add_submodule(submodule_a, submodule_name) 174 | 175 | develop.update_git_submodules = 'never' 176 | develop.sources = { 177 | 'egg': Source( 178 | kind='git', 179 | name='egg', 180 | url=egg.url, 181 | path=src['egg'], 182 | submodules='always'), 183 | 'egg2': Source( 184 | kind='git', 185 | name='egg2', 186 | url=egg2.url, 187 | path=src['egg2'])} 188 | _log = patch('mr.developer.git.logger') 189 | log = _log.__enter__() 190 | try: 191 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 192 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 193 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('foo', '.git')) 194 | assert set(os.listdir(src['egg2'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 195 | assert set(os.listdir(src['egg2/%s' % submodule_name])) == set() 196 | 197 | assert log.method_calls == [ 198 | ('info', ("Cloned 'egg' with git from '%s'." % egg.url,), {}), 199 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {}), 200 | ('info', ("Cloned 'egg2' with git from '%s'." % egg2.url,), {})] 201 | finally: 202 | _log.__exit__(None, None, None) 203 | 204 | def testCheckoutWithSubmodulesOptionAlwaysSourceNever(self, develop, mkgitrepo, src): 205 | """ 206 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it 207 | and a module 'egg2' with the same submodule, not initializing the submodule 208 | on egg2 that has the 'never' option 209 | 210 | """ 211 | from mr.developer.commands import CmdCheckout 212 | submodule_name = 'submodule_a' 213 | submodule_a = mkgitrepo(submodule_name) 214 | submodule_a.add_file('foo') 215 | egg = mkgitrepo('egg') 216 | egg.add_file('bar') 217 | egg.add_submodule(submodule_a, submodule_name) 218 | 219 | egg2 = mkgitrepo('egg2') 220 | egg2.add_file('bar') 221 | egg2.add_submodule(submodule_a, submodule_name) 222 | 223 | develop.sources = { 224 | 'egg': Source( 225 | kind='git', 226 | name='egg', 227 | url=egg.url, 228 | path=src['egg']), 229 | 'egg2': Source( 230 | kind='git', 231 | name='egg2', 232 | url=egg2.url, 233 | path=src['egg2'], 234 | submodules='never')} 235 | _log = patch('mr.developer.git.logger') 236 | log = _log.__enter__() 237 | try: 238 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 239 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 240 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('foo', '.git')) 241 | assert set(os.listdir(src['egg2'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 242 | assert set(os.listdir(src['egg2/%s' % submodule_name])) == set() 243 | 244 | assert log.method_calls == [ 245 | ('info', ("Cloned 'egg' with git from '%s'." 
% egg.url,), {}), 246 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {}), 247 | ('info', ("Cloned 'egg2' with git from '%s'." % egg2.url,), {})] 248 | finally: 249 | _log.__exit__(None, None, None) 250 | 251 | def testUpdateWithSubmoduleCheckout(self, develop, mkgitrepo, src): 252 | """ 253 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it. 254 | Add a new 'submodule_b' to 'egg' and check it doesn't get initialized. 255 | """ 256 | from mr.developer.commands import CmdCheckout, CmdUpdate 257 | submodule_name = 'submodule_a' 258 | submodule = mkgitrepo(submodule_name) 259 | submodule.add_file('foo') 260 | egg = mkgitrepo('egg') 261 | egg.add_file('bar') 262 | egg.add_submodule(submodule, submodule_name) 263 | 264 | develop.sources = { 265 | 'egg': Source( 266 | kind='git', 267 | name='egg', 268 | url=egg.url, 269 | path=src['egg'], 270 | submodules='checkout')} 271 | _log = patch('mr.developer.git.logger') 272 | log = _log.__enter__() 273 | try: 274 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 275 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 276 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo')) 277 | assert log.method_calls == [ 278 | ('info', ("Cloned 'egg' with git from '%s'." % egg.url,), {}), 279 | ('info', ("Initialized 'egg' submodule at '%s' with git." % submodule_name,), {})] 280 | finally: 281 | _log.__exit__(None, None, None) 282 | 283 | submodule_b_name = 'submodule_b' 284 | submodule_b = mkgitrepo(submodule_b_name) 285 | submodule_b.add_file('foo_b') 286 | egg.add_submodule(submodule_b, submodule_b_name) 287 | 288 | log = _log.__enter__() 289 | try: 290 | CmdUpdate(develop)(develop.parser.parse_args(['up', 'egg'])) 291 | assert set(os.listdir(src['egg'])) == set(('submodule_a', 'submodule_b', '.git', 'bar', '.gitmodules')) 292 | assert set(os.listdir(src['egg/%s' % submodule_b_name])) == set() 293 | assert log.method_calls == [ 294 | ('info', ("Updated 'egg' with git.",), {}), 295 | ('info', ("Switching to branch 'master'.",), {})] 296 | finally: 297 | _log.__exit__(None, None, None) 298 | 299 | def testUpdateWithSubmoduleDontUpdatePreviousSubmodules(self, develop, mkgitrepo, src): 300 | """ 301 | Tests the checkout of a module 'egg' with a submodule 'submodule_a' in it. 302 | Commits changes in the detached submodule, and checks update didn't break 303 | the changes. 304 | """ 305 | from mr.developer.commands import CmdCheckout, CmdUpdate 306 | submodule_name = 'submodule_a' 307 | submodule = mkgitrepo(submodule_name) 308 | submodule.add_file('foo') 309 | egg = mkgitrepo('egg') 310 | egg.add_file('bar') 311 | egg.add_submodule(submodule, submodule_name) 312 | 313 | develop.sources = { 314 | 'egg': Source( 315 | kind='git', 316 | name='egg', 317 | url=egg.url, 318 | path=src['egg'])} 319 | _log = patch('mr.developer.git.logger') 320 | log = _log.__enter__() 321 | try: 322 | CmdCheckout(develop)(develop.parser.parse_args(['co', 'egg'])) 323 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 324 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo')) 325 | assert log.method_calls == [ 326 | ('info', ("Cloned 'egg' with git from '%s'." % egg.url,), {}), 327 | ('info', ("Initialized 'egg' submodule at '%s' with git." 
% submodule_name,), {})] 328 | finally: 329 | _log.__exit__(None, None, None) 330 | 331 | repo = GitRepo(src['egg/%s' % submodule_name]) 332 | repo.setup_user() 333 | repo.add_file('newfile') 334 | 335 | log = _log.__enter__() 336 | try: 337 | CmdUpdate(develop)(develop.parser.parse_args(['up', '-f', 'egg'])) 338 | assert set(os.listdir(src['egg'])) == set(('submodule_a', '.git', 'bar', '.gitmodules')) 339 | assert set(os.listdir(src['egg/%s' % submodule_name])) == set(('.git', 'foo', 'newfile')) 340 | assert log.method_calls == [ 341 | ('info', ("Updated 'egg' with git.",), {}), 342 | ('info', ("Switching to branch 'master'.",), {})] 343 | finally: 344 | _log.__exit__(None, None, None) 345 | -------------------------------------------------------------------------------- /src/mr/developer/svn.py: -------------------------------------------------------------------------------- 1 | from mr.developer import common 2 | from mr.developer.compat import b, s 3 | try: 4 | from urllib.parse import urlparse, urlunparse 5 | except ImportError: 6 | from urlparse import urlparse, urlunparse 7 | try: 8 | import xml.etree.ElementTree as etree 9 | etree # shutup pyflakes 10 | except ImportError: 11 | import elementtree.ElementTree as etree 12 | import getpass 13 | import os 14 | import re 15 | import subprocess 16 | import sys 17 | 18 | 19 | try: 20 | raw_input = raw_input 21 | except NameError: 22 | raw_input = input 23 | 24 | 25 | logger = common.logger 26 | 27 | 28 | class SVNError(common.WCError): 29 | pass 30 | 31 | 32 | class SVNAuthorizationError(SVNError): 33 | pass 34 | 35 | 36 | class SVNCertificateError(SVNError): 37 | pass 38 | 39 | 40 | class SVNCertificateRejectedError(SVNError): 41 | pass 42 | 43 | 44 | _svn_version_warning = False 45 | 46 | 47 | class SVNWorkingCopy(common.BaseWorkingCopy): 48 | _svn_info_cache = {} 49 | _svn_auth_cache = {} 50 | _svn_cert_cache = {} 51 | 52 | @classmethod 53 | def _clear_caches(klass): 54 | klass._svn_info_cache.clear() 55 | klass._svn_auth_cache.clear() 56 | klass._svn_cert_cache.clear() 57 | 58 | def _normalized_url_rev(self): 59 | url = urlparse(self.source['url']) 60 | rev = None 61 | if '@' in url[2]: 62 | path, rev = url[2].split('@', 1) 63 | url = list(url) 64 | url[2] = path 65 | if 'rev' in self.source and 'revision' in self.source: 66 | raise ValueError("The source definition of '%s' contains duplicate revision options." % self.source['name']) 67 | if rev is not None and ('rev' in self.source or 'revision' in self.source): 68 | raise ValueError("The url of '%s' contains a revision and there is an additional revision option." 
% self.source['name']) 69 | elif rev is None: 70 | rev = self.source.get('revision', self.source.get('rev')) 71 | return urlunparse(url), rev 72 | 73 | def __init__(self, *args, **kwargs): 74 | common.BaseWorkingCopy.__init__(self, *args, **kwargs) 75 | self.svn_executable = common.which("svn") 76 | self._svn_check_version() 77 | 78 | def _svn_check_version(self): 79 | global _svn_version_warning 80 | try: 81 | cmd = subprocess.Popen([self.svn_executable, "--version"], 82 | stdout=subprocess.PIPE, 83 | stderr=subprocess.PIPE) 84 | except OSError: 85 | if getattr(sys.exc_info()[1], 'errno', None) == 2: 86 | logger.error("Couldn't find 'svn' executable on your PATH.") 87 | sys.exit(1) 88 | raise 89 | stdout, stderr = cmd.communicate() 90 | lines = stdout.split(b('\n')) 91 | version = None 92 | if len(lines): 93 | version = re.search(b(r'(\d+)\.(\d+)(\.\d+)?'), lines[0]) 94 | if version is not None: 95 | version = version.groups() 96 | if len(version) == 3: 97 | version = (int(version[0]), int(version[1]), int(version[2][1:])) 98 | else: 99 | version = (int(version[0]), int(version[1])) 100 | if (cmd.returncode != 0) or (version is None): 101 | logger.error("Couldn't determine the version of 'svn' command.") 102 | logger.error("Subversion output:\n%s\n%s" % (s(stdout), s(stderr))) 103 | sys.exit(1) 104 | if (version < (1, 5)) and not _svn_version_warning: 105 | logger.warning("The installed 'svn' command is too old. Expected 1.5 or newer, got %s." % ".".join([str(x) for x in version])) 106 | _svn_version_warning = True 107 | 108 | def _svn_auth_get(self, url): 109 | for root in self._svn_auth_cache: 110 | if url.startswith(root): 111 | return self._svn_auth_cache[root] 112 | 113 | def _svn_accept_invalid_cert_get(self, url): 114 | for root in self._svn_cert_cache: 115 | if url.startswith(root): 116 | return self._svn_cert_cache[root] 117 | 118 | def _svn_error_wrapper(self, f, **kwargs): 119 | count = 4 120 | while count: 121 | count = count - 1 122 | try: 123 | return f(**kwargs) 124 | except SVNAuthorizationError: 125 | lines = sys.exc_info()[1].args[0].split('\n') 126 | root = lines[-1].split('(')[-1].strip(')') 127 | before = self._svn_auth_cache.get(root) 128 | common.output_lock.acquire() 129 | common.input_lock.acquire() 130 | after = self._svn_auth_cache.get(root) 131 | if before != after: 132 | count = count + 1 133 | common.input_lock.release() 134 | common.output_lock.release() 135 | continue 136 | print("Authorization needed for '%s' at '%s'" % (self.source['name'], self.source['url'])) 137 | user = raw_input("Username: ") 138 | passwd = getpass.getpass("Password: ") 139 | self._svn_auth_cache[root] = dict( 140 | user=user, 141 | passwd=passwd, 142 | ) 143 | common.input_lock.release() 144 | common.output_lock.release() 145 | except SVNCertificateError: 146 | lines = sys.exc_info()[1].args[0].split('\n') 147 | root = lines[-1].split('(')[-1].strip(')') 148 | before = self._svn_cert_cache.get(root) 149 | common.output_lock.acquire() 150 | common.input_lock.acquire() 151 | after = self._svn_cert_cache.get(root) 152 | if before != after: 153 | count = count + 1 154 | common.input_lock.release() 155 | common.output_lock.release() 156 | continue 157 | print("\n".join(lines[:-1])) 158 | while 1: 159 | answer = raw_input("(R)eject or accept (t)emporarily? 
") 160 | if answer.lower() in ['r', 't']: 161 | break 162 | else: 163 | print("Invalid answer, type 'r' for reject or 't' for temporarily.") 164 | if answer == 'r': 165 | self._svn_cert_cache[root] = False 166 | else: 167 | self._svn_cert_cache[root] = True 168 | count = count + 1 169 | common.input_lock.release() 170 | common.output_lock.release() 171 | 172 | def _svn_checkout(self, **kwargs): 173 | name = self.source['name'] 174 | path = self.source['path'] 175 | url = self.source['url'] 176 | args = [self.svn_executable, "checkout", url, path] 177 | stdout, stderr, returncode = self._svn_communicate(args, url, **kwargs) 178 | if returncode != 0: 179 | raise SVNError("Subversion checkout for '%s' failed.\n%s" % (name, s(stderr))) 180 | if kwargs.get('verbose', False): 181 | return s(stdout) 182 | 183 | def _svn_communicate(self, args, url, **kwargs): 184 | auth = self._svn_auth_get(url) 185 | if auth is not None: 186 | args[2:2] = ["--username", auth['user'], 187 | "--password", auth['passwd']] 188 | if not kwargs.get('verbose', False): 189 | args[2:2] = ["--quiet"] 190 | accept_invalid_cert = self._svn_accept_invalid_cert_get(url) 191 | if 'always_accept_server_certificate' in kwargs: 192 | if kwargs['always_accept_server_certificate']: 193 | accept_invalid_cert = True 194 | if accept_invalid_cert is True: 195 | args[2:2] = ["--trust-server-cert"] 196 | elif accept_invalid_cert is False: 197 | raise SVNCertificateRejectedError("Server certificate rejected by user.") 198 | args[2:2] = ["--no-auth-cache"] 199 | interactive_args = args[:] 200 | args[2:2] = ["--non-interactive"] 201 | cmd = subprocess.Popen(args, 202 | stdout=subprocess.PIPE, 203 | stderr=subprocess.PIPE) 204 | stdout, stderr = cmd.communicate() 205 | if cmd.returncode != 0: 206 | lines = stderr.strip().split(b('\n')) 207 | if 'authorization failed' in lines[-1] or 'Could not authenticate to server' in lines[-1]: 208 | raise SVNAuthorizationError(stderr.strip()) 209 | if 'Server certificate verification failed: issuer is not trusted' in lines[-1]: 210 | cmd = subprocess.Popen(interactive_args, 211 | stdin=subprocess.PIPE, 212 | stdout=subprocess.PIPE, 213 | stderr=subprocess.PIPE) 214 | stdout, stderr = cmd.communicate('t') 215 | raise SVNCertificateError(stderr.strip()) 216 | return stdout, stderr, cmd.returncode 217 | 218 | def _svn_info(self): 219 | name = self.source['name'] 220 | if name in self._svn_info_cache: 221 | return self._svn_info_cache[name] 222 | path = self.source['path'] 223 | cmd = subprocess.Popen([self.svn_executable, "info", "--non-interactive", "--xml", 224 | path], 225 | stdout=subprocess.PIPE, 226 | stderr=subprocess.PIPE) 227 | stdout, stderr = cmd.communicate() 228 | if cmd.returncode != 0: 229 | raise SVNError("Subversion info for '%s' failed.\n%s" % (name, s(stderr))) 230 | info = etree.fromstring(stdout) 231 | result = {} 232 | entry = info.find('entry') 233 | if entry is not None: 234 | rev = entry.attrib.get('revision') 235 | if rev is not None: 236 | result['revision'] = rev 237 | info_url = entry.find('url') 238 | if info_url is not None: 239 | result['url'] = info_url.text 240 | entry = info.find('entry') 241 | if entry is not None: 242 | root = entry.find('root') 243 | if root is not None: 244 | result['root'] = root.text 245 | self._svn_info_cache[name] = result 246 | return result 247 | 248 | def _svn_switch(self, **kwargs): 249 | name = self.source['name'] 250 | path = self.source['path'] 251 | url, rev = self._normalized_url_rev() 252 | args = [self.svn_executable, "switch", url, 
path] 253 | if rev is not None and not rev.startswith('>'): 254 | args.insert(2, '-r%s' % rev) 255 | stdout, stderr, returncode = self._svn_communicate(args, url, **kwargs) 256 | if returncode != 0: 257 | raise SVNError("Subversion switch of '%s' failed.\n%s" % (name, s(stderr))) 258 | if kwargs.get('verbose', False): 259 | return s(stdout) 260 | 261 | def _svn_update(self, **kwargs): 262 | name = self.source['name'] 263 | path = self.source['path'] 264 | url, rev = self._normalized_url_rev() 265 | args = [self.svn_executable, "update", path] 266 | if rev is not None and not rev.startswith('>'): 267 | args.insert(2, '-r%s' % rev) 268 | stdout, stderr, returncode = self._svn_communicate(args, url, **kwargs) 269 | if returncode != 0: 270 | raise SVNError("Subversion update of '%s' failed.\n%s" % (name, s(stderr))) 271 | if kwargs.get('verbose', False): 272 | return s(stdout) 273 | 274 | def svn_checkout(self, **kwargs): 275 | name = self.source['name'] 276 | path = self.source['path'] 277 | if os.path.exists(path): 278 | self.output((logger.info, "Skipped checkout of existing package '%s'." % name)) 279 | return 280 | self.output((logger.info, "Checked out '%s' with subversion." % name)) 281 | return self._svn_error_wrapper(self._svn_checkout, **kwargs) 282 | 283 | def svn_switch(self, **kwargs): 284 | name = self.source['name'] 285 | self.output((logger.info, "Switched '%s' with subversion." % name)) 286 | return self._svn_error_wrapper(self._svn_switch, **kwargs) 287 | 288 | def svn_update(self, **kwargs): 289 | name = self.source['name'] 290 | self.output((logger.info, "Updated '%s' with subversion." % name)) 291 | return self._svn_error_wrapper(self._svn_update, **kwargs) 292 | 293 | def checkout(self, **kwargs): 294 | name = self.source['name'] 295 | path = self.source['path'] 296 | update = self.should_update(**kwargs) 297 | if os.path.exists(path): 298 | matches = self.matches() 299 | if matches: 300 | if update: 301 | self.update(**kwargs) 302 | else: 303 | self.output((logger.info, "Skipped checkout of existing package '%s'." % name)) 304 | else: 305 | if self.status() == 'clean': 306 | return self.svn_switch(**kwargs) 307 | else: 308 | url = self._svn_info().get('url', '') 309 | if url: 310 | msg = "The current checkout of '%s' is from '%s'." % (name, url) 311 | msg += "\nCan't switch package to '%s' because it's dirty." % (self.source['url']) 312 | else: 313 | msg = "Can't switch package '%s' to '%s' because it's dirty." 
% (name, self.source['url']) 314 | raise SVNError(msg) 315 | else: 316 | return self.svn_checkout(**kwargs) 317 | 318 | def matches(self): 319 | info = self._svn_info() 320 | url, rev = self._normalized_url_rev() 321 | if url.endswith('/'): 322 | url = url[:-1] 323 | if rev is None: 324 | rev = info.get('revision') 325 | if rev.startswith('>='): 326 | return (info.get('url') == url) and (int(info.get('revision')) >= int(rev[2:])) 327 | elif rev.startswith('>'): 328 | return (info.get('url') == url) and (int(info.get('revision')) > int(rev[1:])) 329 | else: 330 | return (info.get('url') == url) and (info.get('revision') == rev) 331 | 332 | def status(self, **kwargs): 333 | name = self.source['name'] 334 | path = self.source['path'] 335 | cmd = subprocess.Popen([self.svn_executable, "status", "--xml", path], 336 | stdout=subprocess.PIPE, 337 | stderr=subprocess.PIPE) 338 | stdout, stderr = cmd.communicate() 339 | if cmd.returncode != 0: 340 | raise SVNError("Subversion status for '%s' failed.\n%s" % (name, s(stderr))) 341 | info = etree.fromstring(stdout) 342 | clean = True 343 | for target in info.findall('target'): 344 | for entry in target.findall('entry'): 345 | status = entry.find('wc-status') 346 | if status is not None and status.get('item') != 'external': 347 | clean = False 348 | break 349 | if clean: 350 | status = 'clean' 351 | else: 352 | status = 'dirty' 353 | if kwargs.get('verbose', False): 354 | cmd = subprocess.Popen([self.svn_executable, "status", path], 355 | stdout=subprocess.PIPE, 356 | stderr=subprocess.PIPE) 357 | stdout, stderr = cmd.communicate() 358 | if cmd.returncode != 0: 359 | raise SVNError("Subversion status for '%s' failed.\n%s" % (name, s(stderr))) 360 | return status, s(stdout) 361 | else: 362 | return status 363 | 364 | def update(self, **kwargs): 365 | name = self.source['name'] 366 | force = kwargs.get('force', False) 367 | status = self.status() 368 | if not self.matches(): 369 | if force or status == 'clean': 370 | return self.svn_switch(**kwargs) 371 | else: 372 | raise SVNError("Can't switch package '%s' because it's dirty." % name) 373 | if status != 'clean' and not force: 374 | raise SVNError("Can't update package '%s' because it's dirty." % name) 375 | return self.svn_update(**kwargs) 376 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. contents:: :depth: 1 2 | 3 | Introduction 4 | ============ 5 | 6 | .. figure:: https://fschulze.github.io/mr.developer/xkcd-buildout.png 7 | :figwidth: image 8 | 9 | Let Mr. Developer help you win the everlasting buildout battle! 10 | 11 | (Remixed by Matt Hamilton, original from https://xkcd.com/303) 12 | 13 | **mr.developer** is a `zc.buildout`_ extension that makes it easy to work with 14 | buildouts containing lots of packages, of which you only want to develop some. 15 | The basic idea comes from Wichert Akkerman's plonenext effort. 16 | 17 | .. _`zc.buildout`: https://pypi.org/project/zc.buildout/ 18 | 19 | 20 | Usage 21 | ===== 22 | 23 | Add ``mr.developer`` to the ``extensions`` entry in your ``[buildout]`` 24 | section:: 25 | 26 | [buildout] 27 | extensions = mr.developer 28 | 29 | This enables additional ``[buildout]`` options: 30 | 31 | ``sources`` 32 | This specifies the name of a section which lists the repository 33 | information for your packages. Defaults to ``sources``. 
34 | 
35 | ``sources-dir``
36 |   This specifies the default directory where your development packages will
37 |   be placed. Defaults to ``src``.
38 | 
39 | ``auto-checkout``
40 |   This specifies the names of packages which should be checked out during
41 |   buildout. Packages already checked out are skipped. You can use ``*`` as
42 |   a wildcard for all packages in ``sources``.
43 | 
44 | ``always-checkout``
45 |   This defaults to ``false``. If it's ``true``, then all packages specified
46 |   by ``auto-checkout`` and currently in develop mode are updated during each
47 |   buildout run. If set to ``force``, then packages are updated even when
48 |   they are dirty instead of asking interactively.
49 | 
50 | ``update-git-submodules``
51 |   This defaults to ``always``. If it's ``always``, then submodules present
52 |   in each development package are registered and updated on checkout, and
53 |   newly added ones are initialized on updates via the develop command. If you don't want to initialize any submodules,
54 |   set the value to ``never``. If you set the value to ``checkout``,
55 |   code inside submodules will be pulled only on the initial checkout, so the ``develop up`` command
56 |   will leave new submodules empty. Note that an update only initializes
57 |   new submodules; it doesn't pull the newest code from the original submodule repository.
58 | 
59 | ``always-accept-server-certificate``
60 |   This defaults to ``false``. If it's ``true``, invalid server
61 |   certificates are accepted without asking (for subversion repositories).
62 | 
63 | ``mr.developer-threads``
64 |   This sets the number of threads used for parallel checkouts. See
65 |   `Lockups during checkouts and updates`_ for why you might need this.
66 | 
67 | ``git-clone-depth``
68 |   This sets the git clone history size (the ``git clone --depth`` parameter).
69 |   It is rarely useful for development, but very useful in CI environments.
70 |   The other big benefit is the speedup on cloning,
71 |   as only a few revisions are downloaded.
72 |   The default is to fetch the full history.
73 | 
74 | The format of entries in the ``[sources]`` section is::
75 | 
76 |   [sources]
77 |   name = kind url [key=value ...]
78 | 
79 | Where the individual parts are:
80 | 
81 | ``name``
82 |   The package name.
83 | 
84 | ``kind``
85 |   The kind of repository. Currently supported are ``svn``,
86 |   ``hg``, ``git``, ``bzr``, ``darcs``, ``cvs``, or ``fs``.
87 | 
88 | ``url``
89 |   The location of the repository. This value is specific to the version
90 |   control system used.
91 | 
92 | ``key=value``
93 |   You can add options for each individual package with this. No whitespace is
94 |   allowed in ``key``, ``value``, and around the equal sign. For a
95 |   description of the options see below. (*Note*: don't surround your ``key=value``
96 |   with square brackets: we only use ``[ ]`` here to indicate that it
97 |   is optional to add options.)
98 | 
99 | 
100 | The per-package options are:
101 | 
102 | Common options
103 |   The ``path`` option allows you to set the base directory where the
104 |   package will be checked out. The name of the package will be appended to
105 |   the base path. If ``path`` is not set, ``sources-dir`` is used.
106 | 
107 |   With ``full-path`` you can set the directory where the package will be
108 |   checked out. This is the actual destination, nothing will be added. As
109 |   an example::
110 | 
111 |     [sources]
112 |     pkg = fs pkg full-path=/path/to/pkg
113 | 
114 |   The ``update`` option allows you to specify whether a package will be
115 |   updated during buildout or not. If it's ``true``, then it will always be
116 |   updated. If it's ``false``, then it will never be updated, even if the
117 |   global ``always-checkout`` option is set.
118 | 
119 |   The ``egg`` option makes it possible to manage packages which are not
120 |   eggs with ``egg=false``. All commands like ``update`` work as expected,
121 |   but the package isn't added to the ``develop`` buildout option and the
122 |   ``activate`` and ``deactivate`` commands skip the package.
123 | 
124 |   The ``newest_tag`` option allows you to check out/update to the newest tag.
125 |   Possible values of the option are "true" and "false".
126 |   The ``newest_tag_prefix`` option allows you to limit the selection of tags to
127 |   those which start with the prefix.
128 |   These two options currently only work for ``cvs`` and ``hg``.
129 | 
130 | ``svn``
131 |   The ``url`` is one of the URLs supported by subversion.
132 | 
133 |   You can specify a URL with a revision pin, like
134 |   ``https://example.com/trunk@123``.
135 | 
136 |   You can also set the ``rev`` or ``revision`` option, which is either a pin
137 |   like with ``rev=123`` or a minimum revision like ``rev=>123`` or
138 |   ``rev=>=123``. When you set a minimum revision, the repository is updated
139 |   when the current revision is lower.
140 | 
141 | ``git``
142 |   The ``branch`` option allows you to use a specific branch instead of
143 |   master.
144 | 
145 |   The ``rev`` option allows you to use a specific revision (usually a
146 |   tag) instead of the HEAD.
147 | 
148 |   The ``pushurl`` option allows you to explicitly separate the push url from the pull
149 |   url, configured via git config.
150 | 
151 |   The ``submodules`` option allows you to initialize existing submodules.
152 |   The default value is controlled by the buildout option ``update-git-submodules``.
153 |   Possible values are the same as described above for the ``update-git-submodules`` option.
154 | 
155 |   The ``depth`` option allows you to specify how much history you want to clone.
156 |   This is a so-called *shallow clone*.
157 |   Note that this is mostly not useful for regular development clones;
158 |   on the other hand, for one-time usages (continuous integration, for example) it makes clones much faster.
159 |   This option overrides a general ``git-clone-depth`` value,
160 |   so the depth can be specified per source.
161 | 
162 |   Note that the ``branch`` and ``rev`` options are mutually exclusive.
163 | 
164 | ``hg``
165 |   The ``branch`` option allows you to use a specific branch instead of
166 |   default.
167 | 
168 |   The ``rev`` option allows you to force a specific revision
169 |   (hash, tag, branch) to be checked out after buildout.
170 | 
171 | ``bzr``
172 |   Currently no additional options.
173 | 
174 | ``darcs``
175 |   Currently no additional options.
176 | 
177 | ``cvs``
178 |   The ``cvs_root`` option can be used to override the setting of the $CVSROOT
179 |   environment variable.
180 |   The ``tag`` option forces checkout/update of the given tag instead of CVS
181 |   HEAD.
182 | 
183 |   The ``tag_file`` option defines from which file tags will be read (in case of
184 |   using ``newest_tag``). The default value is "setup.py".
185 | 
186 | ``fs``
187 |   This allows you to add packages on the filesystem without a version
188 |   control system, or with an unsupported one. You can activate and
189 |   deactivate packages, but you don't get status info and can't update, etc.
190 | 
191 |   The ``url`` needs to be the same as the ``name`` of the package.
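
To tie the per-package options together, here is a purely illustrative ``[sources]``
section combining several of them (all package names and URLs below are made up)::

  [sources]
  my.git.package = git https://example.com/my.git.package.git branch=stable depth=1
  my.forked.package = git git://example.com/my.forked.package.git pushurl=git@example.com:me/my.forked.package.git submodules=never
  my.svn.package = svn https://example.com/svn/my.svn.package/trunk rev=>=123 path=src-svn
  not.an.egg = fs not.an.egg egg=false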
192 | 
193 | Here's an example of what your ``buildout.cfg`` may look like::
194 | 
195 |   [buildout]
196 |   extensions = mr.developer
197 |   auto-checkout = my.package
198 | 
199 |   [sources]
200 |   my.package = svn https://example.com/svn/my.package/trunk update=true
201 |   some.other.package = git git://example.com/git/some.other.package.git
202 | 
203 | When you run buildout, the script ``bin/develop`` is created in your
204 | buildout directory. With this script you can perform various actions on
205 | packages, like checking out their source code, without the need to know where
206 | the repositories are located.
207 | 
208 | For help on what the script can do, run ``bin/develop help``.
209 | 
210 | If you checked out the source code of a package, you must run buildout again.
211 | The new package will then be marked as a development egg and have its version
212 | pin cleared (if any). You can control the list of development eggs explicitly
213 | with the ``activate`` and ``deactivate`` commands.
214 | 
215 | Any source where the path is a symlink is skipped during updates, as it is
216 | assumed that the developer handles it manually. It is basically treated like
217 | a filesystem source.
218 | 
219 | Configuration
220 | =============
221 | 
222 | You can add options to your global ``~/.buildout/mr.developer.cfg`` or local
223 | ``.mr.developer-options.cfg`` in your buildout. Don't ever edit
224 | ``.mr.developer.cfg`` in your buildout though, it's generated automatically.
225 | 
226 | In the ``[mr.developer]`` section you have the following options.
227 | 
228 | ``threads``
229 |   This sets the number of threads used for parallel checkouts. See
230 |   `Lockups during checkouts and updates`_ for why you might need this.
231 | 
232 | In the ``[rewrites]`` section you can set up rewrite rules for sources. This is
233 | useful if you want to provide a buildout with sources pointing to repositories
234 | whose anonymous read-only URLs differ from their writable URLs. In that
235 | case developers can add a URL rewrite which automatically changes the URL to a
236 | writable repository.
237 | 
238 | The rewrite rules can have multiple operators:
239 | 
240 | ``=``
241 |   Matches the exact string. Useful to only operate on sources of a certain kind
242 |   and similar things. This doesn't rewrite anything, but limits the rule.
243 | 
244 | ``~=``
245 |   Matches with a regular expression. This doesn't rewrite anything, but limits
246 |   the rule.
247 | 
248 | ``~``
249 |   This runs a regular expression substitution. The substitute is read from the
250 |   next line. You can use groups in the expression and the backslash syntax in
251 |   the substitute. See the `re.sub`_ documentation.
252 | 
253 | .. _`re.sub`: https://docs.python.org/3/library/re.html#re.sub
254 | 
255 | The following are useful examples::
256 | 
257 |   [rewrites]
258 | 
259 |   plone_svn =
260 |     url ~ ^http://svn.plone.org/svn/
261 |     https://svn.plone.org/svn/
262 | 
263 |   github =
264 |     url ~ ^https://github.com/
265 |     git@github.com:
266 |     kind = git
267 | 
268 |   my_mrdeveloper_fork =
269 |     url ~ fschulze(/mr.developer.git)
270 |     me\1
271 | 
272 |   my_mrdeveloper_fork_alternate =
273 |     url ~= fschulze/mr.developer.git
274 |     url ~ fschulze/
275 |     me/
276 | 
277 | Extending
278 | =========
279 | 
280 | You can extend mr.developer to teach it new types of Working Copies
281 | and to add or modify existing commands.
282 | 
283 | Mr.developer uses entry points for this. To see examples of how to create entry
284 | points in detail, you can have a look at the existing entry points.
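
As a rough sketch of how such entry points are usually declared (the distribution,
module, and class names below are invented for illustration), a third-party add-on
could register them in its ``setup.py`` like this::

  from setuptools import setup

  setup(
      name='my.mrdeveloper.addon',
      # ...
      entry_points={
          'mr.developer.workingcopytypes': [
              # The key is the 'kind' used in the [sources] section,
              # the value points to the working copy class.
              'myvcs = my.mrdeveloper.addon.myvcs:MyVCSWorkingCopy',
          ],
          'mr.developer.commands': [
              # The key is the command name for bin/develop.
              'hello = my.mrdeveloper.addon.commands:CmdHello',
          ],
      },
  )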
285 | 
286 | Adding support for a new working copy type
287 | ------------------------------------------
288 | Add an entry to the entry point group ``mr.developer.workingcopytypes``.
289 | The key of the entry is going to be used in the sources section of your
290 | buildout file. The value should be a class.
291 | The referenced class must implement the following methods::
292 | 
293 |   - __init__(self, source)
294 |   - matches(self)
295 |   - checkout(self, **kwargs)
296 |   - status(self, verbose=False, **kwargs)
297 |   - update(self, **kwargs)
298 | 
299 | The source is a dictionary-like object. The source object provides the
300 | attributes::
301 | 
302 |   - name
303 |   - url
304 |   - path
305 | 
306 | In addition it contains all key/value pairs one can define on the source line
307 | in buildout, and a method ``exists`` that returns whether the ``path``
308 | already exists.
309 | 
310 | The ``matches`` method must return whether the checkout at the ``path`` matches
311 | the repository at ``url``.
312 | 
313 | The commands map to the commands mr.developer provides. To see the list of
314 | potential arguments, check the documentation of the commands.
315 | The ``checkout`` and ``update`` commands only return what they want to have printed
316 | out on stdout; the ``status`` command must check the verbose flag. If the
317 | verbose flag is set, it must return a tuple with what it wants to print out and
318 | what the VCS commands generated as output.
319 | 
320 | All objects must have a list ``_output`` which contains logging information.
321 | Please refer to existing implementations for how to fill this information.
322 | 
323 | If your working copy handler needs to throw an error, throw errors with
324 | ``mr.developer.common.WCError`` as a base class.
325 | 
326 | If you need to add new functionality for new commands or change the behavior of
327 | something, try not to write a new working copy handler. Try your best to make your
328 | changes generically useful and get them into mr.developer.
329 | 
330 | Adding a new command
331 | --------------------
332 | Add an entry to the entry point group ``mr.developer.commands``.
333 | The key will be the name of the command itself.
334 | 
335 | The referenced class must implement the following methods::
336 | 
337 |   - __init__(self, develop)
338 |   - __call__(self, args)
339 | 
340 | An inversion of control happens here. On initialization, you receive a develop
341 | object that represents the class handling the invocation of ``./bin/develop``.
342 | It is now your job to modify the attributes of the ``develop`` object to handle
343 | argument parsing.
344 | Create an ArgumentParser and add it to ``develop.parsers``.
345 | 
346 | Upon calling, you can perform your actions. It is a good idea to subclass
347 | ``mr.developer.commands.Command``. It provides convenient helper methods::
348 | 
349 |   - get_workingcopies(self, sources)
350 |   - get_packages(args, auto_checkout, develop, checked_out)
351 | 
352 | ``get_workingcopies`` gives you a WorkingCopies object that will delegate all
353 | your working copy actions to the right working copy handler.
354 | 
355 | ``get_packages`` is a little helper to get sources filtered by the rules.
356 | ``args`` can be one or more regular expression filters on source names; the other
357 | attributes are boolean flags that default to ``False``. ``False`` means *not*
358 | to filter. Calling the method only with the ``args`` value ``'.'``
359 | would thus return all packages. The returned object is a set containing only the names of the sources.
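
For example, a minimal command could look roughly like the sketch below. This is
not code taken from mr.developer itself; in particular the parser registration and
the ``func`` default are assumptions, so check the built-in commands in
``mr.developer.commands`` for the exact API::

  from mr.developer.commands import Command


  class CmdHello(Command):
      """Hypothetical 'hello' command that prints the selected package names."""

      def __init__(self, develop):
          Command.__init__(self, develop)
          # Assumption: develop.parsers is the argparse sub-parsers object the
          # built-in commands use to register their sub-commands.
          self.parser = develop.parsers.add_parser(
              "hello",
              help="Print the names of the matching packages.")
          self.parser.add_argument(
              "regexp", nargs="*",
              help="Regular expressions to filter the package names.")
          # Assumption: bin/develop dispatches to the command stored in 'func'.
          self.parser.set_defaults(func=self)

      def __call__(self, args):
          # get_packages filters the configured sources by the given regexps.
          packages = self.get_packages(args.regexp)
          for name in sorted(packages):
              print(name)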
would thus return all 359 | packges. THe returned object is a set containing only the names of the sources. 360 | 361 | To perform an action, you get the package names via get_packages. then you get 362 | the WorkingCopies object and call the action you want to perform on this 363 | object. THe WorkingCopies object checks, which working copy is responsible for 364 | the given package and delegates the action to this object. The WorkingCopies 365 | object is also handling threading functionality. 366 | 367 | The ``develop`` object has a ``config`` property. This object can be used to 368 | store configuration of your actions. under ``config.develop`` a dictionary 369 | resides which stores, whether the source with the given key is going to be used 370 | from source checkout. 371 | 372 | 373 | Troubleshooting 374 | =============== 375 | 376 | Dirty SVN 377 | --------- 378 | 379 | You get an error like:: 380 | 381 | ERROR: Can't switch package 'foo' to 'https://example.com/svn/foo/trunk/' because it's dirty. 382 | 383 | If you have not modified the package files under src/foo, then you can check 384 | what's going on with ``status -v``. One common cause is a ``*.egg-info`` folder 385 | which gets generated every time you run buildout and this shows up as an 386 | untracked item in svn status. 387 | 388 | You should add .egg-info to your global Subversion ignores in 389 | ``~/.subversion/config``, like this:: 390 | 391 | global-ignores = *.o *.lo *.la *.al .libs *.so *.so.[0-9]* *.a *.pyc *.pyo *.rej *~ #*# .#* .*.swp .DS_Store *.egg-info 392 | 393 | HTTPS Certificates 394 | ------------------ 395 | 396 | The best way to handle https certificates at the moment, is to accept them 397 | permanently when checking out the source manually. 398 | 399 | Mercurial reports mismatching URL 400 | --------------------------------- 401 | 402 | This happens if you use lp:// URLs from launchpad. The problem is, that hg 403 | reports the actual URL, not the lp shortcut. 404 | 405 | Lockups during checkouts and updates 406 | ------------------------------------ 407 | 408 | Especially on multicore machines, there is an issue that you can get lockups 409 | because of the parallel checkouts. You can configure the number of threads used 410 | for this in ``.mr.developer.cfg`` in the buildout root of your project or 411 | globally in ``~/.buildout/mr.developer.cfg`` through the ``threads`` option 412 | in the ``[mr.developer]`` section or in your buildout in the ``buildout`` 413 | section with the ``mr.developer-threads`` option. Setting it to ``1`` should 414 | fix these issues, but this disables parallel checkouts and makes the process a 415 | bit slower. 416 | 417 | Also, if you have `ControlPersist` in your local ssh config, and you 418 | have a source checkout that uses ssh (for example 419 | ``git@github.com:...``) the checkout or update may work fine, but the 420 | ssh connection may stay open and ``mr.developer`` cannot exit because 421 | it waits for the ssh process to finish. 
422 | -------------------------------------------------------------------------------- /src/mr/developer/common.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import pkg_resources 4 | import platform 5 | try: 6 | import queue 7 | except ImportError: 8 | import Queue as queue 9 | import re 10 | import subprocess 11 | import six 12 | import sys 13 | import threading 14 | if sys.version_info < (3, ): 15 | from ConfigParser import RawConfigParser 16 | else: 17 | from configparser import RawConfigParser 18 | 19 | 20 | logger = logging.getLogger("mr.developer") 21 | 22 | 23 | def print_stderr(s): 24 | sys.stderr.write(s) 25 | sys.stderr.write('\n') 26 | sys.stderr.flush() 27 | 28 | 29 | try: 30 | advance_iterator = next 31 | except NameError: 32 | def advance_iterator(it): 33 | return it.next() 34 | 35 | try: 36 | raw_input = raw_input 37 | except NameError: 38 | raw_input = input 39 | 40 | 41 | # shameless copy from 42 | # http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python 43 | def which(name_root, default=None): 44 | def is_exe(fpath): 45 | return os.path.exists(fpath) and os.access(fpath, os.X_OK) 46 | 47 | if platform.system() == 'Windows': 48 | # http://www.voidspace.org.uk/python/articles/command_line.shtml#pathext 49 | pathext = os.environ['PATHEXT'] 50 | # example: ['.py', '.pyc', '.pyo', '.pyw', '.COM', '.EXE', '.BAT', '.CMD'] 51 | names = [name_root + ext for ext in pathext.split(';')] 52 | else: 53 | names = [name_root] 54 | 55 | for name in names: 56 | for path in os.environ["PATH"].split(os.pathsep): 57 | exe_file = os.path.join(path, name) 58 | if is_exe(exe_file): 59 | return exe_file 60 | 61 | if default is not None: 62 | return default 63 | 64 | logger.error("Cannot find executable %s in PATH", name_root) 65 | sys.exit(1) 66 | 67 | 68 | def version_sorted(inp, *args, **kwargs): 69 | """ 70 | Sorts components versions, it means that numeric parts of version 71 | treats as numeric and string as string. 72 | 73 | Eg.: version-1-0-1 < version-1-0-2 < version-1-0-10 74 | """ 75 | num_reg = re.compile(r'([0-9]+)') 76 | 77 | def int_str(val): 78 | try: 79 | return int(val) 80 | except ValueError: 81 | return val 82 | 83 | def split_item(item): 84 | return tuple([int_str(j) for j in num_reg.split(item)]) 85 | 86 | def join_item(item): 87 | return ''.join([str(j) for j in item]) 88 | 89 | output = [split_item(i) for i in inp] 90 | return [join_item(i) for i in sorted(output, *args, **kwargs)] 91 | 92 | 93 | def memoize(f, _marker=[]): 94 | def g(*args, **kwargs): 95 | name = '_memoize_%s' % f.__name__ 96 | value = getattr(args[0], name, _marker) 97 | if value is _marker: 98 | value = f(*args, **kwargs) 99 | setattr(args[0], name, value) 100 | return value 101 | return g 102 | 103 | 104 | class WCError(Exception): 105 | """ A working copy error. 
""" 106 | 107 | 108 | class BaseWorkingCopy(object): 109 | def __init__(self, source): 110 | self._output = [] 111 | self.output = self._output.append 112 | self.source = source 113 | 114 | def should_update(self, **kwargs): 115 | offline = kwargs.get('offline', False) 116 | if offline: 117 | return False 118 | update = self.source.get('update', kwargs.get('update', False)) 119 | if not isinstance(update, bool): 120 | if update.lower() in ('true', 'yes'): 121 | update = True 122 | elif update.lower() in ('false', 'no'): 123 | update = False 124 | else: 125 | raise ValueError("Unknown value for 'update': %s" % update) 126 | return update 127 | 128 | 129 | def yesno(question, default=True, all=True): 130 | if default: 131 | question = "%s [Yes/no" % question 132 | answers = { 133 | False: ('n', 'no'), 134 | True: ('', 'y', 'yes'), 135 | } 136 | else: 137 | question = "%s [yes/No" % question 138 | answers = { 139 | False: ('', 'n', 'no'), 140 | True: ('y', 'yes'), 141 | } 142 | if all: 143 | answers['all'] = ('a', 'all') 144 | question = "%s/all] " % question 145 | else: 146 | question = "%s] " % question 147 | while 1: 148 | answer = raw_input(question).lower() 149 | for option in answers: 150 | if answer in answers[option]: 151 | return option 152 | if all: 153 | print_stderr("You have to answer with y, yes, n, no, a or all.") 154 | else: 155 | print_stderr("You have to answer with y, yes, n or no.") 156 | 157 | 158 | main_lock = input_lock = output_lock = threading.RLock() 159 | 160 | 161 | def worker(working_copies, the_queue): 162 | while True: 163 | if working_copies.errors: 164 | return 165 | try: 166 | wc, action, kwargs = the_queue.get_nowait() 167 | except queue.Empty: 168 | return 169 | try: 170 | output = action(**kwargs) 171 | except WCError: 172 | output_lock.acquire() 173 | for lvl, msg in wc._output: 174 | lvl(msg) 175 | for line in sys.exc_info()[1].args[0].split('\n'): 176 | logger.error(line) 177 | working_copies.errors = True 178 | output_lock.release() 179 | else: 180 | output_lock.acquire() 181 | 182 | # See GitHub issue # 210 183 | # wc._output is a list containing n-length tuples which are messages from the thread. 184 | # each tuple (item) first position is a logger function 185 | # the rest of the tuple is the message. 186 | 187 | # In cases where the message tuple has more than 2 elements in it 188 | # (logger, message, message, ... ) 189 | # then all messages are joined. 
190 | for item in wc._output: 191 | lvl = item[0] 192 | msg = ','.join(item[1:]) 193 | lvl(msg) 194 | 195 | if kwargs.get('verbose', False) and output is not None and output.strip(): 196 | if six.PY3 and isinstance(output, six.binary_type): 197 | output = output.decode('utf8') 198 | print(output) 199 | output_lock.release() 200 | 201 | 202 | _workingcopytypes = None 203 | 204 | 205 | def get_workingcopytypes(): 206 | global _workingcopytypes 207 | if _workingcopytypes is not None: 208 | return _workingcopytypes 209 | group = 'mr.developer.workingcopytypes' 210 | _workingcopytypes = {} 211 | addons = {} 212 | for entrypoint in pkg_resources.iter_entry_points(group=group): 213 | key = entrypoint.name 214 | workingcopytype = entrypoint.load() 215 | if entrypoint.dist.project_name == 'mr.developer': 216 | _workingcopytypes[key] = workingcopytype 217 | else: 218 | if key in addons: 219 | logger.error("There already is a working copy type addon registered for '%s'.", key) 220 | sys.exit(1) 221 | logger.info("Overwriting '%s' with addon from '%s'.", key, entrypoint.dist.project_name) 222 | addons[key] = workingcopytype 223 | _workingcopytypes.update(addons) 224 | return _workingcopytypes 225 | 226 | 227 | def get_commands(): 228 | commands = {} 229 | group = 'mr.developer.commands' 230 | addons = {} 231 | for entrypoint in pkg_resources.iter_entry_points(group=group): 232 | key = entrypoint.name 233 | command = entrypoint.load() 234 | if entrypoint.dist.project_name == 'mr.developer': 235 | commands[key] = command 236 | else: 237 | if key in addons: 238 | logger.error('There already is a command addon registered for "%s".', key) 239 | sys.exit(1) 240 | logger.info('Overwriting "%s" with addon from "%s".', 241 | key, entrypoint.dist.project_name) 242 | addons[key] = command 243 | commands.update(addons) 244 | return commands.values() 245 | 246 | 247 | class WorkingCopies(object): 248 | def __init__(self, sources, threads=5): 249 | self.sources = sources 250 | self.threads = threads 251 | self.errors = False 252 | self.workingcopytypes = get_workingcopytypes() 253 | 254 | def process(self, the_queue): 255 | if self.threads < 2: 256 | worker(self, the_queue) 257 | else: 258 | if sys.version_info < (2, 6): 259 | # work around a race condition in subprocess 260 | _old_subprocess_cleanup = subprocess._cleanup 261 | 262 | def _cleanup(): 263 | pass 264 | 265 | subprocess._cleanup = _cleanup 266 | 267 | threads = [] 268 | 269 | for i in range(self.threads): 270 | thread = threading.Thread(target=worker, args=(self, the_queue)) 271 | thread.start() 272 | threads.append(thread) 273 | for thread in threads: 274 | thread.join() 275 | if sys.version_info < (2, 6): 276 | subprocess._cleanup = _old_subprocess_cleanup 277 | subprocess._cleanup() 278 | 279 | if self.errors: 280 | logger.error("There have been errors, see messages above.") 281 | sys.exit(1) 282 | 283 | def checkout(self, packages, **kwargs): 284 | the_queue = queue.Queue() 285 | if 'update' in kwargs: 286 | if isinstance(kwargs['update'], bool): 287 | pass 288 | elif kwargs['update'].lower() in ('true', 'yes', 'on', 'force'): 289 | if kwargs['update'].lower() == 'force': 290 | kwargs['force'] = True 291 | kwargs['update'] = True 292 | elif kwargs['update'].lower() in ('false', 'no', 'off'): 293 | kwargs['update'] = False 294 | else: 295 | logger.error("Unknown value '%s' for always-checkout option." 
296 |                 sys.exit(1)
297 |         kwargs.setdefault('submodules', 'always')
298 |         if kwargs['submodules'] in ['always', 'never', 'checkout']:
299 |             pass
300 |         else:
301 |             logger.error("Unknown value '%s' for update-git-submodules option." % kwargs['submodules'])
302 |             sys.exit(1)
303 |         for name in packages:
304 |             kw = kwargs.copy()
305 |             if name not in self.sources:
306 |                 logger.error("Checkout failed. No source defined for '%s'." % name)
307 |                 sys.exit(1)
308 |             source = self.sources[name]
309 |             kind = source['kind']
310 |             wc = self.workingcopytypes.get(kind)(source)
311 |             if wc is None:
312 |                 logger.error("Unknown repository type '%s'." % kind)
313 |                 sys.exit(1)
314 |             update = wc.should_update(**kwargs)
315 |             if not source.exists():
316 |                 pass
317 |             elif os.path.islink(source['path']):
318 |                 logger.info("Skipped update of linked '%s'." % name)
319 |                 continue
320 |             elif update and wc.status() != 'clean' and not kw.get('force', False):
321 |                 print_stderr("The package '%s' is dirty." % name)
322 |                 answer = yesno("Do you want to update it anyway?", default=False, all=True)
323 |                 if answer:
324 |                     kw['force'] = True
325 |                     if answer == 'all':
326 |                         kwargs['force'] = True
327 |                 else:
328 |                     logger.info("Skipped update of '%s'." % name)
329 |                     continue
330 |             logger.info("Queued '%s' for checkout.", name)
331 |             the_queue.put_nowait((wc, wc.checkout, kw))
332 |         self.process(the_queue)
333 |
334 |     def matches(self, source):
335 |         name = source['name']
336 |         if name not in self.sources:
337 |             logger.error("Checkout failed. No source defined for '%s'." % name)
338 |             sys.exit(1)
339 |         source = self.sources[name]
340 |         try:
341 |             kind = source['kind']
342 |             wc = self.workingcopytypes.get(kind)(source)
343 |             if wc is None:
344 |                 logger.error("Unknown repository type '%s'." % kind)
345 |                 sys.exit(1)
346 |             return wc.matches()
347 |         except WCError:
348 |             for line in sys.exc_info()[1].args[0].split('\n'):
349 |                 logger.error(line)
350 |             sys.exit(1)
351 |
352 |     def status(self, source, **kwargs):
353 |         name = source['name']
354 |         if name not in self.sources:
355 |             logger.error("Status failed. No source defined for '%s'." % name)
356 |             sys.exit(1)
357 |         source = self.sources[name]
358 |         try:
359 |             kind = source['kind']
360 |             wc = self.workingcopytypes.get(kind)(source)
361 |             if wc is None:
362 |                 logger.error("Unknown repository type '%s'." % kind)
363 |                 sys.exit(1)
364 |             return wc.status(**kwargs)
365 |         except WCError:
366 |             for line in sys.exc_info()[1].args[0].split('\n'):
367 |                 logger.error(line)
368 |             sys.exit(1)
369 |
370 |     def update(self, packages, **kwargs):
371 |         the_queue = queue.Queue()
372 |         for name in packages:
373 |             kw = kwargs.copy()
374 |             if name not in self.sources:
375 |                 continue
376 |             source = self.sources[name]
377 |             kind = source['kind']
378 |             wc = self.workingcopytypes.get(kind)(source)
379 |             if wc is None:
380 |                 logger.error("Unknown repository type '%s'." % kind)
381 |                 sys.exit(1)
382 |             if wc.status() != 'clean' and not kw.get('force', False):
383 |                 print_stderr("The package '%s' is dirty." % name)
384 |                 answer = yesno("Do you want to update it anyway?", default=False, all=True)
385 |                 if answer:
386 |                     kw['force'] = True
387 |                     if answer == 'all':
388 |                         kwargs['force'] = True
389 |                 else:
390 |                     logger.info("Skipped update of '%s'." % name)
391 |                     continue
392 |             logger.info("Queued '%s' for update.", name)
393 |             the_queue.put_nowait((wc, wc.update, kw))
394 |         self.process(the_queue)
395 |
396 |
397 | def parse_buildout_args(args):
398 |     settings = dict(
399 |         config_file='buildout.cfg',
400 |         verbosity=0,
401 |         options=[],
402 |         windows_restart=False,
403 |         user_defaults=True,
404 |         debug=False,
405 |     )
406 |     options = []
407 |     version = pkg_resources.get_distribution("zc.buildout").version
408 |     if tuple(version.split('.')[:2]) <= ('1', '4'):
409 |         option_str = 'vqhWUoOnNDA'
410 |     else:
411 |         option_str = 'vqhWUoOnNDAs'
412 |     while args:
413 |         if args[0][0] == '-':
414 |             op = orig_op = args.pop(0)
415 |             op = op[1:]
416 |             while op and op[0] in option_str:
417 |                 if op[0] == 'v':
418 |                     settings['verbosity'] = settings['verbosity'] + 10
419 |                 elif op[0] == 'q':
420 |                     settings['verbosity'] = settings['verbosity'] - 10
421 |                 elif op[0] == 'W':
422 |                     settings['windows_restart'] = True
423 |                 elif op[0] == 'U':
424 |                     settings['user_defaults'] = False
425 |                 elif op[0] == 'o':
426 |                     options.append(('buildout', 'offline', 'true'))
427 |                 elif op[0] == 'O':
428 |                     options.append(('buildout', 'offline', 'false'))
429 |                 elif op[0] == 'n':
430 |                     options.append(('buildout', 'newest', 'true'))
431 |                 elif op[0] == 'N':
432 |                     options.append(('buildout', 'newest', 'false'))
433 |                 elif op[0] == 'D':
434 |                     settings['debug'] = True
435 |                 elif op[0] == 's':
436 |                     settings['ignore_broken_dash_s'] = True
437 |                 else:
438 |                     raise ValueError("Unknown option '%s'." % op[0])
439 |                 op = op[1:]
440 |
441 |             if op[:1] in ('c', 't'):
442 |                 op_ = op[:1]
443 |                 op = op[1:]
444 |
445 |                 if op_ == 'c':
446 |                     if op:
447 |                         settings['config_file'] = op
448 |                     else:
449 |                         if args:
450 |                             settings['config_file'] = args.pop(0)
451 |                         else:
452 |                             raise ValueError("No file name specified for option", orig_op)
453 |                 elif op_ == 't':
454 |                     try:
455 |                         int(args.pop(0))
456 |                     except IndexError:
457 |                         raise ValueError("No timeout value specified for option", orig_op)
458 |                     except ValueError:
459 |                         raise ValueError("Timeout value must be numeric", orig_op)
460 |                     settings['socket_timeout'] = op
461 |             elif op:
462 |                 if orig_op == '--help':
463 |                     return 'help'
464 |                 raise ValueError("Invalid option", '-' + op[0])
465 |         elif '=' in args[0]:
466 |             option, value = args.pop(0).split('=', 1)
467 |             parts = option.split(':')
468 |             if len(parts) == 2:
469 |                 section, option = parts
470 |             elif len(parts) == 1:
471 |                 section = 'buildout'
472 |             else:
473 |                 raise ValueError('Invalid option:', option)
474 |             options.append((section.strip(), option.strip(), value.strip()))
475 |         else:
476 |             # We've run out of command-line options and option assignments.
477 |             # The rest should be commands, so we'll stop here.
478 |             break
479 |     return options, settings, args
480 |
481 |
482 | class Rewrite(object):
483 |     _matcher = re.compile(r"(?P